From 14d1132acce528bebb9a7ca0511a190d306bfb61 Mon Sep 17 00:00:00 2001 From: mlemayTTE Date: Wed, 26 Mar 2025 17:23:37 +0100 Subject: [PATCH 01/20] Create geos-pv package with geosLogReader --- geos-pv/pyproject.toml | 33 + geos-pv/setup.py | 11 + geos-pv/src/PVplugins/PVGeosLogReader.py | 624 ++++++++++ geos-pv/src/geos_pv/__init__.py | 0 .../GeosLogReaderAquifers.py | 179 +++ .../GeosLogReaderConvergence.py | 131 +++ .../geosLogReaderUtils/GeosLogReaderFlow.py | 224 ++++ .../geosLogReaderUtils/GeosLogReaderWells.py | 293 +++++ .../geosLogReaderFunctions.py | 1048 +++++++++++++++++ geos-pv/src/geos_pv/py.typed | 0 geos-pv/src/geos_pv/utils/__init__.py | 0 geos-pv/src/geos_pv/utils/checkboxFunction.py | 22 + .../src/geos_pv/utils/paraviewTreatments.py | 608 ++++++++++ 13 files changed, 3173 insertions(+) create mode 100644 geos-pv/pyproject.toml create mode 100644 geos-pv/setup.py create mode 100644 geos-pv/src/PVplugins/PVGeosLogReader.py create mode 100644 geos-pv/src/geos_pv/__init__.py create mode 100644 geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderAquifers.py create mode 100644 geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderConvergence.py create mode 100644 geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderFlow.py create mode 100644 geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderWells.py create mode 100644 geos-pv/src/geos_pv/geosLogReaderUtils/geosLogReaderFunctions.py create mode 100644 geos-pv/src/geos_pv/py.typed create mode 100644 geos-pv/src/geos_pv/utils/__init__.py create mode 100644 geos-pv/src/geos_pv/utils/checkboxFunction.py create mode 100644 geos-pv/src/geos_pv/utils/paraviewTreatments.py diff --git a/geos-pv/pyproject.toml b/geos-pv/pyproject.toml new file mode 100644 index 00000000..1328fedf --- /dev/null +++ b/geos-pv/pyproject.toml @@ -0,0 +1,33 @@ +[build-system] +requires = ["setuptools>=42", "wheel"] +build-backend = "setuptools.build_meta" + +[project] +name = "geos-pv" +version = "0.1.0" +description = "geos-pv is a 
Python package that gathers Paraview plugins and dedicated utils to process and visualize GEOS inputs and outputs." +authors = [{name = "GEOS Contributors" }] +maintainers = [ + {name = "Martin Lemay", email = "martin.lemay@external.totalenergies.com"} +] +license = {text = "Apache-2.0"} +classifiers = [ + "Development Status :: 4 - Beta", + "Programming Language :: Python" +] + +requires-python = ">=3.9" + +dependencies = [ + "typing_extensions", + "pandas", + "numpy", +] + +[project.scripts] + + +[tool.mypy] +python_version = "3.9" +warn_return_any = true +warn_unused_configs = true \ No newline at end of file diff --git a/geos-pv/setup.py b/geos-pv/setup.py new file mode 100644 index 00000000..70b545a8 --- /dev/null +++ b/geos-pv/setup.py @@ -0,0 +1,11 @@ +from pathlib import Path +from setuptools import setup + +# This is where you add any fancy path resolution to the local lib: +local_path: str = (Path(__file__).parent).as_uri() + +setup( + install_requires=[ + f"geos-utils @ {local_path}", + ] +) \ No newline at end of file diff --git a/geos-pv/src/PVplugins/PVGeosLogReader.py b/geos-pv/src/PVplugins/PVGeosLogReader.py new file mode 100644 index 00000000..c583609a --- /dev/null +++ b/geos-pv/src/PVplugins/PVGeosLogReader.py @@ -0,0 +1,624 @@ +# SPDX-License-Identifier: Apache-2.0 +# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. 
+# SPDX-FileContributor: Alexandre Benedicto +# ruff: noqa: E402 # disable Module level import not at top of file +import os +import sys +from enum import Enum +from typing import Union, cast + +import numpy as np +import numpy.typing as npt +import pandas as pd # type: ignore[import-untyped] +from typing_extensions import Self + +dir_path = os.path.dirname(os.path.realpath(__file__)) +parent_dir_path = os.path.dirname(dir_path) +if parent_dir_path not in sys.path: + sys.path.append(parent_dir_path) + +import vtkmodules.util.numpy_support as vnp +from paraview.util.vtkAlgorithm import ( # type: ignore[import-not-found] + VTKPythonAlgorithmBase, + smdomain, + smhint, + smproperty, + smproxy, +) +from vtk import VTK_DOUBLE # type: ignore[import-untyped] +from vtkmodules.vtkCommonCore import vtkDataArraySelection as vtkDAS +from vtkmodules.vtkCommonCore import ( + vtkDoubleArray, + vtkInformation, + vtkInformationVector, +) +from vtkmodules.vtkCommonDataModel import vtkTable + +from geos_pv.geosLogReaderUtils.geosLogReaderFunctions import ( + identifyProperties, + transformUserChoiceToListPhases, +) +from geos_pv.geosLogReaderUtils.GeosLogReaderAquifers import GeosLogReaderAquifers +from geos_pv.geosLogReaderUtils.GeosLogReaderConvergence import GeosLogReaderConvergence +from geos_pv.geosLogReaderUtils.GeosLogReaderFlow import GeosLogReaderFlow +from geos_pv.geosLogReaderUtils.GeosLogReaderWells import GeosLogReaderWells +from geos_utils.enumUnits import ( + Mass, + MassRate, + Pressure, + Time, + Unit, + Volume, + VolumetricRate, + enumerationDomainUnit, +) +from geos_utils.UnitRepository import UnitRepository +from geos_pv.utils.checkboxFunction import ( # type: ignore[attr-defined] + createModifiedCallback, +) +from geos_pv.utils.paraviewTreatments import ( + strListToEnumerationDomainXml, +) + +__doc__ = """ +PVGeosLogReader is a Paraview plugin that allows to read Geos output log. + +Input is a file and output is a vtkTable containing log information. 
+ +..WARNING:: + + The reader is compliant with GEOS log before commit version #9365098. + For more recent version, use the csv or hdf5 export options from GEOS. + +To use it: + +* Load the module in Paraview: Tools>Manage Plugins...>Load new>PVGeosLogReader. +* Open (File>Open...) and Select Geos output log .out/.txt file. +* In the "Open data with..." window, Select PVGeosLogReader reader. + +""" + + +@smproxy.reader( + name="PVGeosLogReader", + label="Geos Log Reader", + extensions=["txt", "out"], + file_description="GEOS log .txt or .out files", +) +class PVGeosLogReader(VTKPythonAlgorithmBase): + def __init__(self: Self) -> None: + """Paraview reader for Geos log files ."txt" or ".out". + + Output is a vtkTable with data extracted from the log. + """ + super().__init__(nInputPorts=0, nOutputPorts=1, outputType="vtkTable") + self.m_filepath: str = "" + self.m_phasesUserChoice: list[str] = [] + self.m_dataframeChoice: int = 0 + self.m_dataframe: pd.DataFrame + self.m_numberWellsMean: int = 1 + + # checkboxes values + self.m_useSIUnits: int = 0 + self.m_pressureUnit: int = 0 + self.m_bhpUnit: int = 0 + self.m_stressUnit: int = 0 + self.m_timeUnit: int = 0 + self.m_massUnit: int = 0 + self.m_volumeUnit: int = 0 + self.m_volumetricRateUnit: int = 0 + self.m_massRateUnit: int = 0 + self.m_densityUnit: int = 0 + + # for selection of properties + self.m_propertiesFlow: vtkDAS = vtkDAS() + self.m_propertiesFlow.AddObserver("ModifiedEvent", createModifiedCallback(self)) # type: ignore[arg-type] + propsFlow: list[str] = [ + "DeltaPressure", + "Pressure", + "Temperature", + "TotalDynamicPoreVolume", + "DynamicPoreVolumes", + "NonTrapped", + "Trapped", + "Immobile", + "Mobile", + "Dissolved", + "TotalFluidMass", + "CellFluidMass", + ] + for prop in propsFlow: + self.m_propertiesFlow.AddArray(prop) + + self.m_propertiesWells: vtkDAS = vtkDAS() + self.m_propertiesWells.AddObserver("ModifiedEvent", createModifiedCallback(self)) # type: ignore[arg-type] + propsWells: list[str] 
= [ + "MeanBHP", + "MeanTotalMassRate", + "MeanTotalVolumetricRate", + "MeanSurfaceVolumetricRate", + "TotalMassRate", + "TotalVolumetricRate", + "SurfaceVolumetricRate", + "Mass", + "BHP", + ] + for prop in propsWells: + self.m_propertiesWells.AddArray(prop) + + self.m_propertiesAquifers: vtkDAS = vtkDAS() + self.m_propertiesAquifers.AddObserver("ModifiedEvent", createModifiedCallback(self)) # type: ignore[arg-type] + propsAquifers: list[str] = [ + "Volume", + "VolumetricRate", + "CumulatedVolume", + "CumulatedVolumetricRate", + ] + for prop in propsAquifers: + self.m_propertiesAquifers.AddArray(prop) + + self.m_convergence: vtkDAS = vtkDAS() + self.m_convergence.AddObserver("ModifiedEvent", createModifiedCallback(self)) # type: ignore[arg-type] + propsSolvers: list[str] = ["NewtonIter", "LinearIter"] + for prop in propsSolvers: + self.m_convergence.AddArray(prop) + + @smproperty.stringvector( + name="DataFilepath", default_values="Enter a filepath to your data" + ) + @smdomain.filelist() + @smhint.filechooser(extensions=["txt", "out"], file_description="Data files") + def a01SetFilepath(self: Self, filepath: str) -> None: + """Set Geos log file path. + + Args: + filepath (str): path to the file. + + Raises: + FileNotFoundError: file not found. + """ + if filepath != "Enter a filepath to your data": + if not os.path.exists(filepath): + raise FileNotFoundError(f"Invalid filepath {filepath}") + else: + self.m_filepath = filepath + self.Modified() + + def getFilepath(self: Self) -> str: + """Get Geos log file path. + + Returns: + str: filepath. + """ + return self.m_filepath + + @smproperty.stringvector( + name="EnterPhaseNames", label="Enter Phase Names", default_values="" + ) + @smdomain.xml( + """ + Please enter the phase names as in the form: phase0, phase1, phase2 + """ + ) + def a02SetPhaseNames(self: Self, value: str) -> None: + """Set phase names. + + Args: + value (str): list of phase names separated by space. 
+ """ + self.m_phasesUserChoice = transformUserChoiceToListPhases(value) + self.Modified() + + def getPhasesUserChoice(self: Self) -> list[str]: + """Access the phases from the user input. + + Returns: + list[str]: phase names. + """ + return self.m_phasesUserChoice + + @smproperty.intvector( + name="DataframeChoice", + number_of_elements=1, + label="DataframeChoice", + default_values=0, + ) + @smdomain.xml( + strListToEnumerationDomainXml(["Flow", "Wells", "Aquifers", "Convergence"]) + ) + def a03SetDataFrameChoice(self: Self, value: int) -> None: + """Set reader choice: 0:Flow, 1:Wells, 2:Aquifers, 3:Convergence. + + Args: + value (int): user choice. + """ + self.m_dataframeChoice = value + self.Modified() + + def getDataframeChoice(self: Self) -> int: + """Accesses the choice of dataframe from the user. + + Returns: + int: The value corresponding to a certain dataframe. + "Flow" has value "0", "Wells" has value "1", + "Aquifers" has value "2", "Convergence" has + value "3". + """ + return self.m_dataframeChoice + + @smproperty.xml( + """ + + + + """ + ) + def a04PropertyGroup(self: Self) -> None: + """Organized group.""" + self.Modified() + + @smproperty.dataarrayselection(name="FlowProperties") + def a05SetPropertiesFlow(self: Self) -> vtkDAS: + """Use Flow.""" + return self.m_propertiesFlow + + @smproperty.xml( + """ + + + """ + ) + def a06GroupFlow(self: Self) -> None: + """Organized group.""" + self.Modified() + + @smproperty.dataarrayselection(name="WellsProperties") + def a07SetPropertiesWells(self: Self) -> vtkDAS: + """Use wells.""" + return self.m_propertiesWells + + @smproperty.intvector(name="NumberOfWellsForMeanCalculation", default_values=1) + def a08SetTheNumberOfWellsMean(self: Self, number: int) -> None: + """Set number of wells. + + Args: + number (int): number of wells. + """ + self.m_numberWellsMean = number + self.Modified() + + def getNumberOfWellsMean(self: Self) -> int: + """Get the number of wells. 
+ + Returns: + int: The number of wells to consider. + """ + return self.m_numberWellsMean + + @smproperty.xml( + """ + + + + """ + ) + def a09GroupWells(self: Self) -> None: + """Organized group.""" + self.Modified() + + @smproperty.dataarrayselection(name="AquifersProperties") + def a10SetPropertiesAquifers(self: Self) -> vtkDAS: + """Use aquifers.""" + return self.m_propertiesAquifers + + @smproperty.xml( + """ + + + """ + ) + def a11GroupAquifers(self: Self) -> None: + """Organized group.""" + self.Modified() + + @smproperty.dataarrayselection(name="Convergence") + def a12SetConvergence(self: Self) -> vtkDAS: + """Use convergence.""" + return self.m_convergence + + @smproperty.xml( + """ + + + """ + ) + def a13GroupSolvers(self: Self) -> None: + """Organized group.""" + self.Modified() + + def getIdsToUse(self: Self) -> list[str]: + """Get property ids. + + Using the checkbox choices of the user for metaproperties, + we get the list of ids to map the dataframe properties with the + properties. + + Returns: + list(str): Ids of the metaproperties. + """ + dataArrays: dict[int, vtkDAS] = { + 0: self.m_propertiesFlow, + 1: self.m_propertiesWells, + 2: self.m_propertiesAquifers, + 3: self.m_convergence, + } + dataArrayToUse = dataArrays[self.getDataframeChoice()] + propertyNames: list[str] = [] + for i in range(dataArrayToUse.GetNumberOfArrays()): + propName: str = dataArrayToUse.GetArrayName(i) + if dataArrayToUse.ArrayIsEnabled(propName) == 1: + propertyNames.append(propName) + propertiesWithId: list[str] = identifyProperties(propertyNames) + onlyIds: list[str] = [] + for propId in propertiesWithId: + idFound: str = propId.split(":")[0] + onlyIds.append(idFound) + return onlyIds + + @smproperty.intvector(name="UseSIUnits", label="UseSIUnits", default_values=1) + @smdomain.xml("""""") + def b01SetUseSIUnits(self: Self, value: int) -> None: + """Set Use SI Units. + + Args: + value (int): user choice. 
+ """ + self.m_useSIUnits = value + self.Modified() + + @smproperty.intvector( + name="Pressure", label="Pressure", default_values=0, panel_visibility="default" + ) + @smdomain.xml(enumerationDomainUnit(cast(Enum, Pressure))) + def b02SetPressureUnit(self: Self, value: int) -> None: + """Set pressure unit. + + Args: + value (int): user choice. + """ + self.m_pressureUnit = value + self.Modified() + + @smproperty.intvector( + name="BHP", label="BHP", default_values=0, panel_visibility="default" + ) + @smdomain.xml(enumerationDomainUnit(cast(Enum, Pressure))) + def b03SetBHPUnit(self: Self, value: int) -> None: + """Set BHP unit. + + Args: + value (int): user choice. + """ + self.m_bhpUnit = value + self.Modified() + + @smproperty.intvector( + name="Time", label="Time", default_values=0, panel_visibility="default" + ) + @smdomain.xml(enumerationDomainUnit(cast(Enum, Time))) + def b04SetTimeUnit(self: Self, value: int) -> None: + """Set time unit. + + Args: + value (int): user choice. + """ + self.m_timeUnit = value + self.Modified() + + @smproperty.intvector( + name="Mass", label="Mass", default_values=0, panel_visibility="default" + ) + @smdomain.xml(enumerationDomainUnit(cast(Enum, Mass))) + def b05SetMassUnit(self: Self, value: int) -> None: + """Set mass unit. + + Args: + value (int): user choice. + """ + self.m_massUnit = value + self.Modified() + + @smproperty.intvector( + name="Volume", label="Volume", default_values=0, panel_visibility="default" + ) + @smdomain.xml(enumerationDomainUnit(cast(Enum, Volume))) + def b06SetVolumeUnit(self: Self, value: int) -> None: + """Set volume unit. + + Args: + value (int): user choice. 
+ """ + self.m_volumeUnit = value + self.Modified() + + @smproperty.intvector( + name="VolumetricRate", + label="VolumetricRate", + default_values=0, + panel_visibility="default", + ) + @smdomain.xml(enumerationDomainUnit(cast(Enum, VolumetricRate))) + def b07SetVolumetricRateUnit(self: Self, value: int) -> None: + """Set volumetric rate unit. + + Args: + value (int): user choice. + """ + self.m_volumetricRateUnit = value + self.Modified() + + @smproperty.intvector( + name="MassRate", label="MassRate", default_values=0, panel_visibility="default" + ) + @smdomain.xml(enumerationDomainUnit(cast(Enum, MassRate))) + def b08SetMassRateUnit(self: Self, value: int) -> None: + """Set Mass rate unit. + + Args: + value (int): user choice. + """ + """""" + self.m_massRateUnit = value + self.Modified() + + @smproperty.xml( + """ + + + + + + + + + """ + ) + def b09GroupUnitsToUse(self: Self) -> None: + """Organize group.""" + self.Modified() + + def getUseSIUnits(self: Self) -> int: + """Acess the choice to use SI units or not. + + Returns: + int: 0 to not use SI units or 1 to use SI units. + """ + return self.m_useSIUnits + + def getUnitChoices(self: Self) -> dict[str, int]: + """Get the units choosen by the user. + + Based on the choice of using SI units or not, and if + not with the units chosen by the user, returns a dict + with metaproperties such as pressure, volume etc ... + with the unit associated. + + Returns: + dict[str, int]: empty dictionary if use SI unit, or + property name as keys and unit choice as values. 
+ """ + unitChoices: dict[str, int] = {} + if not self.getUseSIUnits(): + unitChoices = { + "pressure": self.m_pressureUnit, + "stress": self.m_stressUnit, + "bhp": self.m_bhpUnit, + "mass": self.m_massUnit, + "massRate": self.m_massRateUnit, + "time": self.m_timeUnit, + "volume": self.m_volumeUnit, + "volumetricRate": self.m_volumetricRateUnit, + "density": self.m_densityUnit, + } + return unitChoices + + def createDataframe(self: Self) -> pd.DataFrame: + """Create dataframe with values from Geos log based on user choices. + + Returns: + pd.DataFrame: Dataframe with log values according to user choice. + """ + filepath: str = self.getFilepath() + phaseNames: list[str] = self.getPhasesUserChoice() + choice: int = self.getDataframeChoice() + userPropertiesUnits: dict[str, int] = self.getUnitChoices() + unitObj: UnitRepository = UnitRepository(userPropertiesUnits) + propertiesUnit: dict[str, Unit] = unitObj.getPropertiesUnit() + reader: Union[ + GeosLogReaderFlow, + GeosLogReaderWells, + GeosLogReaderAquifers, + GeosLogReaderConvergence, + ] + if choice == 0: + reader = GeosLogReaderFlow(filepath, propertiesUnit, phaseNames) + elif choice == 1: + nbrWells: int = self.getNumberOfWellsMean() + reader = GeosLogReaderWells(filepath, propertiesUnit, phaseNames, nbrWells) + elif choice == 2: + reader = GeosLogReaderAquifers(filepath, propertiesUnit) + elif choice == 3: + reader = GeosLogReaderConvergence(filepath, propertiesUnit) + return reader.createDataframe() + + def RequestInformation( + self: Self, + request: vtkInformation, # noqa: F841 + inInfoVec: list[vtkInformationVector], # noqa: F841 + outInfoVec: vtkInformationVector, + ) -> int: + """Inherited from VTKPythonAlgorithmBase::RequestInformation. + + Args: + request (vtkInformation): request + inInfoVec (list[vtkInformationVector]): input objects + outInfoVec (vtkInformationVector): output objects + + Returns: + int: 1 if calculation successfully ended, 0 otherwise. 
+ """ + executive = self.GetExecutive() + outInfo = outInfoVec.GetInformationObject(0) + outInfo.Remove(executive.TIME_STEPS()) + outInfo.Remove(executive.TIME_RANGE()) + return 1 + + def RequestData( + self: Self, + request: vtkInformation, # noqa: F841 + inInfoVec: list[vtkInformationVector], # noqa: F841 + outInfoVec: vtkInformationVector, + ) -> int: + """Inherited from VTKPythonAlgorithmBase::RequestData. + + Args: + request (vtkInformation): request + inInfoVec (list[vtkInformationVector]): input objects + outInfoVec (vtkInformationVector): output objects + + Returns: + int: 1 if calculation successfully ended, 0 otherwise. + """ + try: + # we choose which dataframe to build and get it + idsToUse = self.getIdsToUse() + dataframe = self.createDataframe() + usefulColumns = [] + for column_name in list(dataframe.columns): + if ":" not in column_name: + usefulColumns.append(column_name) + else: + idFound = column_name.split(":")[0] + if idFound in idsToUse: + usefulColumns.append(column_name) + # we build the output vtkTable + output: vtkTable = vtkTable.GetData(outInfoVec, 0) + for column in usefulColumns: + pandas_series: pd.Series = dataframe[column] + array: npt.NDArray[np.float64] = pandas_series.values + if ":" in column: + column = column.split(":")[1] + + newAttr: vtkDoubleArray = vnp.numpy_to_vtk(array, deep=True, array_type=VTK_DOUBLE) # type: ignore[no-untyped-call] + newAttr.SetName(column) + output.AddColumn(newAttr) + except Exception as e: + print("Error while reading Geos log file:") + print(str(e)) + return 0 + return 1 diff --git a/geos-pv/src/geos_pv/__init__.py b/geos-pv/src/geos_pv/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderAquifers.py b/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderAquifers.py new file mode 100644 index 00000000..2fae08d4 --- /dev/null +++ b/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderAquifers.py @@ -0,0 +1,179 @@ +# 
SPDX-License-Identifier: Apache-2.0 +# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. +# SPDX-FileContributor: Alexandre Benedicto +from io import TextIOBase + +import pandas as pd # type: ignore[import-untyped] +from typing_extensions import Self + +import geos_pv.geosLogReaderUtils.geosLogReaderFunctions as fcts +from geos_utils.enumUnits import Unit + + +class GeosLogReaderAquifers: + def __init__(self: Self, filepath: str, propertiesUnit: dict[str, Unit]) -> None: + """Reader for Aquifer. + + Args: + filepath (str): path to geos log file. + propertiesUnit ( dict[str, Unit]): unit preferences + """ + self.m_propertiesUnit = propertiesUnit + self.m_aquiferNames: list[str] = [] + self.m_aquifersPropertiesValues: dict[str, list[float]] = {} + self.m_timesteps: list[float] = [] + + toFindInLog: list[str] = ["_pressureInfluence_table", "Time: 0"] + if not fcts.elementsAreInLog(filepath, toFindInLog): + print( + "Invalid Geos log file. Please check that your log" + + " did not crash and contains aquifers." + ) + else: + self.readAll(filepath) + self.calculateExtraValues() + + def readAquiferNames(self: Self, file: TextIOBase) -> tuple[str, int]: + """Initialize the m_aquiferNames attribute by reading log file. + + Args: + file (TextIOBase): Geos Log file + + Returns: + tuple(str, int): The last line with time info read. + The id of the last line read that contained the tag "_pressureInfluence_table"., + which will be the line containing the first positive timestep at 0s. 
+ """ + aquiferNames: list[str] = [] + line: str = file.readline() + id_line = 1 + while not line.startswith("Time: 0"): + if "_pressureInfluence_table" in line: + aquiferName: str = fcts.extractAquifer(line) + aquiferNames.append(aquiferName) + line = file.readline() + id_line += 1 + self.m_aquiferNames = aquiferNames + return (line, id_line) + + def readPropertiesValues( + self: Self, file: TextIOBase, line: str, id_line: int, total_lines: int + ) -> None: + """Read aquifer property values from geos log file. + + Initialize the m_aquifersPropertiesValues and m_timesteps attributes by reading + the Geos log. If a timestep contains the tag m_computeStatisticsName, the + current timestep is added to m_timesteps and we recover the property values + in m_regionsPropertiesValues. + + Args: + file (TextIOBase): Geos Log file + line (str): last line read in the file. + id_line (int): The id of the last line read in readPhaseNames. + total_lines (int): The number of lines in the file. + """ + aquifsPropertiesValues: dict[str, list[float]] = {} + for aquifName in self.m_aquiferNames: + propVolume: str = aquifName + "__Volume" + propVolumeId: str = fcts.identifyProperties([propVolume])[0] + propRate: str = aquifName + "__VolumetricRate" + propRateId: str = fcts.identifyProperties([propRate])[0] + aquifsPropertiesValues[propVolumeId] = [0.0] + aquifsPropertiesValues[propRateId] = [0.0] + newTimestep, currentDT = fcts.extractTimeAndDt(line) + timesteps: list[float] = [newTimestep] + line = file.readline() + id_line += 1 + while id_line <= total_lines: + if line.startswith("Time:"): + newTimestep, currentDT = fcts.extractTimeAndDt(line) + newTimestep = fcts.convertValues( + ["Time"], [newTimestep], self.m_propertiesUnit + )[0] + if " produces a flux of " in line: + if newTimestep not in timesteps and newTimestep > max( + timesteps, default=0.0 + ): + timesteps.append(newTimestep) + for key in aquifsPropertiesValues: + aquifsPropertiesValues[key].append(0.0) + aquifName, volume 
= fcts.extractValueAndNameAquifer(line) + rate: float = volume / currentDT + propVol: str = aquifName + "__Volume" + propVolId: str = fcts.identifyProperties([propVol])[0] + propRate = aquifName + "__VolumetricRate" + propRateId = fcts.identifyProperties([propRate])[0] + aquifsPropertiesValues[propVolId][-1] = fcts.convertValues( + [propVol], [volume], self.m_propertiesUnit + )[0] + aquifsPropertiesValues[propRateId][-1] = fcts.convertValues( + [propRate], [rate], self.m_propertiesUnit + )[0] + line = file.readline() + id_line += 1 + self.m_aquifersPropertiesValues = aquifsPropertiesValues + self.m_timesteps = timesteps + + def readAll(self: Self, filepath: str) -> None: + """Initialize all the attributes of the class by reading a Geos log file. + + Args: + filepath (str): Geos log filepath. + """ + with open(filepath) as geosFile: + total_lines: int = fcts.countNumberLines(filepath) + line, id_line = self.readAquiferNames(geosFile) + self.readPropertiesValues(geosFile, line, id_line, total_lines) + + def calculateExtraValues(self: Self) -> None: + """Add cumulated columns for each aquifer volume and aquifer rate.""" + for aquifName in self.m_aquiferNames: + propVolume: str = aquifName + "__Volume" + propVolumeId: str = fcts.identifyProperties([propVolume])[0] + propRate: str = aquifName + "__VolumetricRate" + propRateId: str = fcts.identifyProperties([propRate])[0] + volumes: list[float] = self.m_aquifersPropertiesValues[propVolumeId] + rates: list[float] = self.m_aquifersPropertiesValues[propRateId] + cumuVol_name = aquifName + "__CumulatedVolume" + cumuVolId: str = fcts.identifyProperties([cumuVol_name])[0] + cumuRate_name = aquifName + "__CumulatedVolumetricRate" + cumuRateId: str = fcts.identifyProperties([cumuRate_name])[0] + cumuVol_values: list[float] = [volumes[0]] + cumuRate_values: list[float] = [rates[0]] + for i in range(1, len(volumes)): + cumuVol_values.append(cumuVol_values[i - 1] + volumes[i]) + cumuRate_values.append(cumuRate_values[i - 1] + 
rates[i]) + self.m_aquifersPropertiesValues[cumuVolId] = cumuVol_values + self.m_aquifersPropertiesValues[cumuRateId] = cumuRate_values + + def createDataframe(self: Self) -> pd.DataFrame: + """Create and fill and return dataframeAquifers. + + Returns: + pd.DataFrame: dataframe with values from Geos log. + """ + try: + colNames: list[str] = [] + colValues: list[float] = [] + for propName, values in self.m_aquifersPropertiesValues.items(): + unitObj: Unit = self.m_propertiesUnit["nounit"] + for propertyType in self.m_propertiesUnit: + if propertyType.lower() in propName.lower(): + unitObj = self.m_propertiesUnit[propertyType] + break + if unitObj.unitLabel == "": + raise ValueError( + "No unit was found for this property name <<" + propName + ">>." + ) + columnName: str = propName + "__" + unitObj.unitLabel + colNames.append(columnName) + colValues.append(values) # type: ignore[arg-type] + timeUnit: Unit = self.m_propertiesUnit["time"] + timeName: str = "Time__" + timeUnit.unitLabel + colNames.append(timeName) + colValues.append(self.m_timesteps) # type: ignore[arg-type] + data = {colNames[i]: colValues[i] for i in range(len(colNames))} + dataframeAquifers: pd.DataFrame = pd.DataFrame(data) + return dataframeAquifers + except ValueError as err: + print(err.args[0]) diff --git a/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderConvergence.py b/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderConvergence.py new file mode 100644 index 00000000..9dd1bb85 --- /dev/null +++ b/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderConvergence.py @@ -0,0 +1,131 @@ +# SPDX-License-Identifier: Apache-2.0 +# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. 
+# SPDX-FileContributor: Alexandre Benedicto +from io import TextIOBase + +import pandas as pd # type: ignore[import-untyped] +from typing_extensions import Self + +import geos_pv.geosLogReaderUtils.geosLogReaderFunctions as fcts +from geos_utils.enumUnits import Unit + + +class GeosLogReaderConvergence: + def __init__(self: Self, filepath: str, propertiesUnit: dict[str, Unit]) -> None: + """Reader for Convergence information. + + Args: + filepath (str): path to geos log file. + propertiesUnit ( dict[str, Unit]): unit preferences + """ + self.m_propertiesUnit: dict[str, Unit] = propertiesUnit + self.m_solversIterationsValues: dict[str, list[float]] = {} + self.m_timesteps: list[float] = [] + self.m_dts: list[float] = [] + + toFindInLog: list[str] = ["Time:"] + if not fcts.elementsAreInLog(filepath, toFindInLog): + print("Invalid Geos log file. Please check that your log did not crash.") + else: + self.readAll(filepath) + self.calculateExtraValues() + + def readIterationsValues(self: Self, file: TextIOBase, total_lines: int) -> None: + """Read iteration values from Geos log file. + + Initialize the m_aquifersPropertiesValues and m_timesteps attributes + by reading the Geos log. If a timestep contains the tag + m_computeStatisticsName, the current timestep is added to m_timesteps + and we recover the property values in m_regionsPropertiesValues. + + Args: + file (TextIOBase): Geos Log file + total_lines (int): The number of lines in the file. 
+ """ + newtonIterId, linearIterId = fcts.identifyProperties( + ["NewtonIter", "LinearIter"] + ) + iterationsValues: dict[str, list[float]] = {newtonIterId: [], linearIterId: []} + timesteps: list[float] = [] + dts: list[float] = [] + line: str = file.readline() + id_line = 1 + while not line.startswith("Time:"): + line = file.readline() + id_line += 1 + while id_line <= total_lines: + if line.startswith("Time:"): + timestep, dt = fcts.extractTimeAndDt(line) + timestep, dt = fcts.convertValues( + ["Time", "Time"], [timestep, dt], self.m_propertiesUnit + ) + if timestep > max(timesteps, default=-9.9e99): + timesteps.append(timestep) + dts.append(dt) + iterationsValues[newtonIterId].append(0.0) + iterationsValues[linearIterId].append(0.0) + elif "NewtonIter:" in line: + newtonIter: int = fcts.extractNewtonIter(line) + if newtonIter > 0: + iterationsValues[newtonIterId][-1] += 1.0 + elif "Linear Solver" in line: + linearIter: int = fcts.extractLinearIter(line) + iterationsValues[linearIterId][-1] += linearIter + line = file.readline() + id_line += 1 + self.m_solversIterationsValues = iterationsValues + self.m_timesteps = timesteps + self.m_dts = dts + + def readAll(self: Self, filepath: str) -> None: + """Initialize all the attributes of the class by reading a Geos log file. + + Args: + filepath (str): Geos log filepath. 
+ """ + with open(filepath) as geosFile: + total_lines: int = fcts.countNumberLines(filepath) + self.readIterationsValues(geosFile, total_lines) + + def calculateExtraValues(self: Self) -> None: + """Add cumulated columns for newtonIter and linearIter.""" + siv: dict[str, list[float]] = self.m_solversIterationsValues + cumulatedNewtonIter, cumulatedLinearIter = fcts.identifyProperties( + ["CumulatedNewtonIter", "CumulatedLinearIter"] + ) + siv[cumulatedNewtonIter] = [] + siv[cumulatedLinearIter] = [] + newtonIterId, linearIterId = fcts.identifyProperties( + ["NewtonIter", "LinearIter"] + ) + newtonIter: list[float] = siv[newtonIterId] + linearIter: list[float] = siv[linearIterId] + sumNewtonIter: float = 0.0 + sumLinearIter: float = 0.0 + for i in range(len(newtonIter)): + sumNewtonIter += newtonIter[i] + sumLinearIter += linearIter[i] + siv[cumulatedNewtonIter].append(sumNewtonIter) + siv[cumulatedLinearIter].append(sumLinearIter) + + def createDataframe(self: Self) -> pd.DataFrame: + """Create and fill and return dataframeSolversIterations. + + Returns: + pd.DataFrame: dataframe with values from Geos log. 
+ """ + colNames: list[str] = [] + colValues: list[float] = [] + for propName, values in self.m_solversIterationsValues.items(): + colNames.append(propName) + colValues.append(values) # type: ignore[arg-type] + timeUnit: str = self.m_propertiesUnit["time"].unitLabel + timeName: str = "Time__" + timeUnit + dtName: str = "dt__" + timeUnit + colNames.append(timeName) + colNames.append(dtName) + colValues.append(self.m_timesteps) # type: ignore[arg-type] + colValues.append(self.m_dts) # type: ignore[arg-type] + data = {colNames[i]: colValues[i] for i in range(len(colNames))} + dataframeSolversIterations: pd.DataFrame = pd.DataFrame(data) + return dataframeSolversIterations diff --git a/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderFlow.py b/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderFlow.py new file mode 100644 index 00000000..544febac --- /dev/null +++ b/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderFlow.py @@ -0,0 +1,224 @@ +# SPDX-License-Identifier: Apache-2.0 +# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. +# SPDX-FileContributor: Alexandre Benedicto +from io import TextIOBase +from typing import Union + +import pandas as pd # type: ignore[import-untyped] +from typing_extensions import Self + +import geos_pv.geosLogReaderUtils.geosLogReaderFunctions as fcts +from geos_utils.enumUnits import Unit + + +class GeosLogReaderFlow: + def __init__( + self: Self, + filepath: str, + propertiesUnit: dict[str, Unit], + phaseNames: Union[list[str], None] = None, + ) -> None: + """A reader that reads .txt and .out files containing Geos logs. + + To do that, we use specific tags in the current version of this code. + Supposed tags are: + + * for region names: "Adding Object CellElementRegion" Supposed + line:"Adding Object CellElementRegion named Reservoir from + ObjectManager::Catalog". + * for phase names: "phaseModel" Supposed line: " TableFunction: + fluid_phaseModel1_PhillipsBrineDensity_table". 
+ * for timesteps: "Time:" Supposed line: "Time: 0s, dt:100s, Cycle: 0". + * for CFL properties: "CFL". Supposed line: "compflowStatistics: Max + phase CFL number: 0.00696878" + + Another important tag in the log will be the name + of the flow statistics model used to output 2D data + in the Geos Log. This one will be found automatically. + The flow statistics model that can output flow data are: + + * "SinglePhaseStatistics". + * "CompositionalMultiphaseStatistics". + + Args: + filepath (str): path to Geos log file + propertiesUnit (dict[str, Unit]): unit preferences + phaseNames (list[str], optional): Name of the phases. + + Defaults to []. + """ + self.m_propertiesUnit = propertiesUnit + self.m_regionNames: list[str] = [] + numberPhases: int = fcts.findNumberPhasesSimulation(filepath) + + if phaseNames is None: + phaseNames = [] + self.m_phaseNames: list[str] = fcts.phaseNamesBuilder(numberPhases, phaseNames) + self.m_computeStatisticsName: str = "" + self.m_regionsPropertiesValues: dict[str, list[float]] = {} + self.m_timesteps: list[float] = [] + + toFindInLog: list[str] = ["Adding Object CellElementRegion", "Time: 0"] + if not fcts.elementsAreInLog(filepath, toFindInLog): + print( + "Invalid Geos log file. Please check that your log" + + " did not crash and contains statistics on flow properties." + ) + else: + self.readAll(filepath) + + def readRegionNames(self: Self, file: TextIOBase) -> int: + """Initialize the m_regionNames attribute by reading log file. 
+ + Args: + file (TextIOBase): Geos Log file + + Returns: + int: The id of the last line read that contained the tag + "Adding Object CellElementRegion" + """ + regionsName: list[str] = [] + line: str = file.readline() + id_line: int = 1 + while "Adding Object CellElementRegion" not in line: + line = file.readline() + id_line += 1 + while "Adding Object CellElementRegion" in line: + regionName: str = fcts.extractRegion(line) + regionsName.append(regionName) + line = file.readline() + id_line += 1 + self.m_regionNames = regionsName + return id_line + + def readComputeStatisticsName( + self: Self, file: TextIOBase, id_line: int, total_lines: int + ) -> tuple[int, str]: + """Read flow statistics from the Geos log file. + + Args: + file (TextIOBase): Geos Log file + id_line (int): The id of the last line read in readPhaseNames. + total_lines (int): total number of lines in the file. + + Returns: + tuple[int, str]: Tuple containingt the id of the last line read and + the line. + """ + computeStatisticsName: str = "" + line: str = file.readline() + id_line += 1 + while not line.startswith("Time: 0"): + line = file.readline() + id_line += 1 + keepReading: bool = True + while keepReading: + line = file.readline() + id_line += 1 + if id_line > total_lines: + raise ValueError("No statistics name found in the log") + for regionName in self.m_regionNames: + if regionName in line: + computeStatisticsName = fcts.extractStatsName(line) + keepReading = False + break + self.m_computeStatisticsName = computeStatisticsName + return (id_line, line) + + def readPropertiesValues( + self: Self, file: TextIOBase, id_line: int, total_lines: int, lineTagStats: str + ) -> None: + """Read property values from Geos log file. + + Initialize the m_regionsPropertiesValues and m_timesteps attributes + by reading the Geos log. If a timestep contains the tag + m_computeStatisticsName, the current timestep is added to m_timesteps + and we recover the property values in m_regionsPropertiesValues. 
+ + Args: + file (TextIOBase): Geos Log file + id_line (int): The id of the last line read in readPhaseNames. + total_lines (int): The number of lines in the file. + lineTagStats (str): The first line containing the tag of + the flow statistics model. + """ + regionPropertiesValues: dict[str, list[float]] = {} + newTimestep: float = 0.0 + timesteps: list[float] = [newTimestep] + line: str = lineTagStats + while id_line <= total_lines: + if line.startswith("Time:"): + newTimestep, dt = fcts.extractTimeAndDt(line) + newTimestep = fcts.convertValues( + ["Time"], [newTimestep], self.m_propertiesUnit + )[0] + if self.m_computeStatisticsName in line and "CFL" not in line: + if newTimestep not in timesteps and newTimestep > max( + timesteps, default=0.0 + ): + timesteps.append(newTimestep) + for key in regionPropertiesValues: + regionPropertiesValues[key].append(0.0) + propsName: list[str] = fcts.extractPropertiesFlow( + line, self.m_phaseNames + ) + propsNameId: list[str] = fcts.identifyProperties(propsName) + for propNameId in propsNameId: + if propNameId not in regionPropertiesValues: + regionPropertiesValues[propNameId] = [0.0] + propsValue: list[float] = fcts.extractValuesFlow(line) + valuesConverted: list[float] = fcts.convertValues( + propsName, propsValue, self.m_propertiesUnit + ) + for i, name in enumerate(propsNameId): + regionPropertiesValues[name][-1] = valuesConverted[i] + line = file.readline() + id_line += 1 + self.m_regionsPropertiesValues = regionPropertiesValues + self.m_timesteps = timesteps + + def readAll(self: Self, filepath: str) -> None: + """Initialize all the attributes of the class by reading a Geos log file. + + Args: + filepath (str): Geos log filepath. 
+ """ + with open(filepath) as geosFile: + total_lines: int = fcts.countNumberLines(filepath) + id_line: int = self.readRegionNames(geosFile) + id_line, lineTag = self.readComputeStatisticsName( + geosFile, id_line, total_lines + ) + self.readPropertiesValues(geosFile, id_line, total_lines, lineTag) + + def createDataframe(self: Self) -> pd.DataFrame: + """Create and fill and return dataframeFlow. + + Returns: + pd.DataFrame: dataframe with values from Geos log. + """ + try: + colNames: list[str] = [] + colValues: list[float] = [] + for propName, values in self.m_regionsPropertiesValues.items(): + unitObj: Unit = self.m_propertiesUnit["nounit"] + for propertyType in self.m_propertiesUnit: + if propertyType in propName.lower(): + unitObj = self.m_propertiesUnit[propertyType] + break + if unitObj.unitLabel == "": + raise ValueError( + "No unit was found for this property name <<" + propName + ">>." + ) + columnName: str = propName + "__" + unitObj.unitLabel + colNames.append(columnName) + colValues.append(values) # type: ignore[arg-type] + timeUnit: str = self.m_propertiesUnit["time"].unitLabel + timeName: str = "Time__" + timeUnit + colNames.append(timeName) + colValues.append(self.m_timesteps) # type: ignore[arg-type] + data = {colNames[i]: colValues[i] for i in range(len(colNames))} + dataframeFlow: pd.DataFrame = pd.DataFrame(data) + return dataframeFlow + except ValueError as err: + print(err.args[0]) diff --git a/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderWells.py b/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderWells.py new file mode 100644 index 00000000..e028e15f --- /dev/null +++ b/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderWells.py @@ -0,0 +1,293 @@ +# SPDX-License-Identifier: Apache-2.0 +# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. 
    def __init__(
        self: Self,
        filepath: str,
        propertiesUnit: dict[str, Unit],
        phaseNames: Union[list[str], None] = None,
        numberWellsForMean: int = 1,
    ) -> None:
        """Reader for well data from a Geos log file.

        Specific tags are searched in the log:

        * for well names: "Adding object WellElementRegion" and
          "_ConstantBHP_table". Supposed lines:
          "Adding Object WellElementRegion named wellRegion1 from
          ObjectManager::Catalog".
          " TableFunction: wellControls1_ConstantBHP_table".
        * for phase names: "phaseModel". Supposed line:
          " TableFunction: fluid_phaseModel1_PhillipsBrineDensity_table".
        * for timesteps: "Time:". Supposed line:
          "Time: 0s, dt:100s, Cycle: 0".

        For well properties, special tags are used:
        " BHP " ; " total rate" ; " total surface volumetric rate" ;
        "phase surface volumetric rate" ; "well is shut" ;
        "density of phase" ; "total fluid density".

        Args:
            filepath (str): path of Geos log file
            propertiesUnit (dict[str, Unit]): unit preferences
            phaseNames (list[str] | None, optional): Name of the phases.

                Defaults to None.
            numberWellsForMean (int, optional): Number of wells. Defaults to 1.
        """
        self.m_propertiesUnit: dict[str, Unit] = propertiesUnit
        self.m_numberWellsForMean: int = numberWellsForMean
        self.m_wellNames: list[str] = []
        numberPhases: int = fcts.findNumberPhasesSimulation(filepath)

        if phaseNames is None:
            phaseNames = []
        self.m_phaseNames: list[str] = fcts.phaseNamesBuilder(numberPhases, phaseNames)
        self.m_wellsPropertiesValues: dict[str, list[float]] = {}
        self.m_timesteps: list[float] = []

        # Sanity tags: BHP-controlled wells and rate-controlled wells each
        # leave a distinct TableFunction trace in the log.
        toFindInLog1: list[str] = [
            "_ConstantBHP_table",
            "Time: 0",
            " TableFunction: ",
        ]
        toFindInLog2: list[str] = [
            "_ConstantPhaseRate_table",
            "Time: 0",
            " TableFunction: ",
        ]
        foundInLog1: bool = fcts.elementsAreInLog(filepath, toFindInLog1)
        foundInLog2: bool = fcts.elementsAreInLog(filepath, toFindInLog2)
        # NOTE(review): this rejects any log that does not contain BOTH a
        # ConstantBHP table and a ConstantPhaseRate table, while
        # readWellNames accepts either kind. Presumably an "or" of the two
        # was intended — confirm against representative logs.
        if not foundInLog1 or not foundInLog2:
            print(
                "Invalid Geos log file. Please check that your log"
                + " did not crash and contains wells."
            )
        else:
            self.readAll(filepath)
            self.calculateMeanValues()
+ """ + wellsName: list[str] = [] + line: str = file.readline() + id_line: int = 1 + intoWellNames: bool = False + while not intoWellNames: + line = file.readline() + id_line += 1 + if "_ConstantBHP_table" in line or "_ConstantPhaseRate_table" in line: + intoWellNames = True + intoTableFunctions: bool = True + while intoTableFunctions: + if "_ConstantBHP_table" in line or "_ConstantPhaseRate_table" in line: + wellName: str = fcts.extractWell(line) + if wellName not in wellsName: + wellsName.append(wellName) + line = file.readline() + id_line += 1 + if " TableFunction: " not in line: + intoTableFunctions = False + self.m_wellNames = wellsName + return id_line + + def initWellPropertiesValues(self: Self) -> None: + """Initialize the m_wellPropertiesValues.""" + props: dict[str, list[float]] = {} + for name in self.m_wellNames: + wName: str = fcts.formatPropertyName(name) + bhp: str = wName + "__BHP" + totalMassRate: str = wName + "__TotalMassRate" + totalSVR: str = wName + "__TotalSurfaceVolumetricRate" + propsNoId: list[str] = [bhp, totalMassRate, totalSVR] + if len(self.m_phaseNames) > 1: + for phase in self.m_phaseNames: + pName: str = fcts.formatPropertyName(phase) + phaseSVR: str = wName + "__SurfaceVolumetricRate" + pName + propsNoId.append(phaseSVR) + propsWithId = fcts.identifyProperties(propsNoId) + for propName in propsWithId: + props[propName] = [0.0] + self.m_wellsPropertiesValues = props + + def readPropertiesValues( + self: Self, file: TextIOBase, id_line: int, total_lines: int + ) -> None: + """Read property values from Geos log file. + + Initialize the m_regionsPropertiesValues and m_timesteps attributes + by reading the Geos log. If a timestep contains the tag + m_computeStatisticsName, the current timestep is added to m_timesteps + and we recover the property values in m_regionsPropertiesValues. + + Args: + file (TextIOBase): Geos Log file + id_line (int): The id of the last line read in readPhaseNames. 
+ total_lines (int): The number of lines in the file. + """ + line: str = file.readline() + id_line += 1 + while not line.startswith("Time: 0"): + line = file.readline() + id_line += 1 + wellsPropertiesValues: dict[str, list[float]] = self.m_wellsPropertiesValues + currentWellName: str = self.m_wellNames[0] + currentPhaseName: str = self.m_phaseNames[0] + newTimestep: float = 0.0 + timesteps: list[float] = [newTimestep] + while id_line <= total_lines: + wellTags = fcts.extractWellTags(line) + if line.startswith("Time:"): + newTimestep, dt = fcts.extractTimeAndDt(line) + newTimestep = fcts.convertValues( + ["Time"], [newTimestep], self.m_propertiesUnit + )[0] + # If at least one well tag is found, this is a well line + if len(wellTags) > 0: + if newTimestep not in timesteps and newTimestep > max( + timesteps, default=0.0 + ): + timesteps.append(newTimestep) + for key in wellsPropertiesValues: + wellsPropertiesValues[key].append(0.0) + newWellName: str = fcts.identifyCurrentWell(line, currentWellName) + if newWellName != currentWellName: + if newWellName in self.m_wellNames: + currentWellName = newWellName + else: + print( + f"Invalid well name <<{newWellName}>> found" + + f" at timestep <<{str(newTimestep)}>>" + + f" in line :\n<<{line}>>.\nAnother correct well" + + f" name <<{currentWellName}>> was used to" + + " correct this.\nExpected well names are :" + + f" {str(self.m_wellNames)}.\n" + ) + if ("phase" in line.lower()) and ("phase surface" not in line.lower()): + newPhaseId: int = fcts.extractPhaseId(line) + if self.m_phaseNames[newPhaseId] != currentWellName: + currentPhaseName = self.m_phaseNames[newPhaseId] + propsName: list[str] = fcts.extractPropertiesWell( + line, currentWellName, currentPhaseName + ) + for name in propsName: + if "density" in name.lower(): + propsName.pop(propsName.index(name)) + if len(propsName) > 0 and "IsShut" not in propsName[0]: + propsNameId: list[str] = fcts.identifyProperties(propsName) + propsValue: list[float] = 
fcts.extractValuesWell( + line, len(propsName) + ) + valuesConverted: list[float] = fcts.convertValues( + propsName, propsValue, self.m_propertiesUnit + ) + for i, name in enumerate(propsNameId): + wellsPropertiesValues[name][-1] = valuesConverted[i] + + line = file.readline() + id_line += 1 + self.m_wellsPropertiesValues = wellsPropertiesValues + self.m_timesteps = timesteps + + def readAll(self: Self, filepath: str) -> None: + """Initialize all the attributes of the class by reading a Geos log file. + + Args: + filepath (str): Geos log filepath. + singlephase (bool): True if its a singlephase simulation, + False if multiphase. + """ + with open(filepath) as geosFile: + total_lines: int = fcts.countNumberLines(filepath) + id_line = self.readWellNames(geosFile) + self.initWellPropertiesValues() + self.readPropertiesValues(geosFile, id_line, total_lines) + + def calculateMeanValues(self: Self) -> None: + """Calculate mean values of all wells.""" + nbr: int = self.m_numberWellsForMean + wNames: list[str] = self.m_wellNames + pNames: list[str] = self.m_phaseNames + wpv: dict[str, list[float]] = self.m_wellsPropertiesValues + cNames: list[str] = list(wpv.keys()) + bhpNames: list[str] = [n for n in cNames if "bhp" in n.lower()] + totalMassRateNames: list[str] = [ + n for n in cNames if "totalmassrate" in n.lower() + ] + totalSVRNames: list[str] = [ + n for n in cNames if "totalsurfacevolumetricrate" in n.lower() + ] + differentMeanColumns: dict[str, list[str]] = { + "MeanBHP": bhpNames, + "MeanTotalMassRate": totalMassRateNames, + "MeanTotalVolumetricRate": totalSVRNames, + } + for pName in pNames: + pName = fcts.formatPropertyName(pName) + meanName: str = "MeanSurfaceVolumetricRate" + pName + differentMeanColumns[meanName] = [] + for wName in wNames: + wName = fcts.formatPropertyName(wName) + n: str = wName + "__SurfaceVolumetricRate" + pName + n = fcts.identifyProperties([n])[0] + if n in cNames: + differentMeanColumns[meanName].append(n) + for meanName, columns in 
differentMeanColumns.items(): + if len(columns) > 0: + values: list[list[float]] = [wpv[c] for c in columns] + meanValues: list[float] = [sum(item) / nbr for item in zip(*values)] + meanNameWithId: str = fcts.identifyProperties([meanName])[0] + self.m_wellsPropertiesValues[meanNameWithId] = meanValues + + def createDataframe(self: Self) -> pd.DataFrame: + """Create and fill and return dataframeWells. + + Return: + pd.DataFrame: dataframe with log values. + """ + colNames: list[str] = [] + colValues: list[float] = [] + try: + for propName, values in self.m_wellsPropertiesValues.items(): + unitObj: Unit = self.m_propertiesUnit["nounit"] + for propertyType in self.m_propertiesUnit: + if propertyType.lower() in propName.lower(): + unitObj = self.m_propertiesUnit[propertyType] + break + if unitObj.unitLabel == "": + raise ValueError( + "No unit was found for this property name <<" + propName + ">>." + ) + columnName: str = propName + "__" + unitObj.unitLabel + colNames.append(columnName) + colValues.append(values) # type: ignore[arg-type] + except ValueError as err: + print(err.args[0]) + timeUnit: str = self.m_propertiesUnit["time"].unitLabel + timeName: str = "Time__" + timeUnit + colNames.append(timeName) + colValues.append(self.m_timesteps) # type: ignore[arg-type] + data = {colNames[i]: colValues[i] for i in range(len(colNames))} + dataframeWells: pd.DataFrame = pd.DataFrame(data) + return dataframeWells diff --git a/geos-pv/src/geos_pv/geosLogReaderUtils/geosLogReaderFunctions.py b/geos-pv/src/geos_pv/geosLogReaderUtils/geosLogReaderFunctions.py new file mode 100644 index 00000000..4b4a0b70 --- /dev/null +++ b/geos-pv/src/geos_pv/geosLogReaderUtils/geosLogReaderFunctions.py @@ -0,0 +1,1048 @@ +# SPDX-License-Identifier: Apache-2.0 +# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. 
def extractRegion(geosLogLine: str) -> str:
    """Extract the region name from an "Adding Object CellElementRegion" line.

    Args:
        geosLogLine (str): expected line: "Adding Object CellElementRegion
            named Reservoir from ObjectManager::Catalog."

    Raises:
        ValueError: if the line cannot be parsed or has no token after
            "named".

    Returns:
        str: the region name, e.g. "Reservoir"
    """
    try:
        tokens: list[str] = geosLogLine.split()
        # The region name is the token right after the "named" keyword.
        nameIndex: int = tokens.index("named") + 1
        if nameIndex >= len(tokens):
            raise ValueError(
                "Not enough elements to unpack in region line <<" + geosLogLine + ">>"
            )
        return tokens[nameIndex]
    except Exception as e:
        raise ValueError(
            "An error has occured while parsing region line <<" + geosLogLine + ">>"
        ) from e
def extractAquifer(geosLogLine: str) -> str:
    """Extract the aquifer name from a pressure-influence TableFunction line.

    Args:
        geosLogLine (str): expected line: " TableFunction:
            aquifer1_pressureInfluence_table"

    Raises:
        ValueError: if the line cannot be parsed.

    Returns:
        str: the aquifer name, e.g. "aquifer1"
    """
    try:
        # Token after the colon, spaces removed, truncated at the table suffix.
        afterColon: str = geosLogLine.split(":")[1].replace(" ", "")
        return afterColon[: afterColon.index("_pressureInfluence_table")]
    except Exception as e:
        raise ValueError(
            "An error has occured while parsing region line <<" + geosLogLine + ">>"
        ) from e
def extractPhaseModel(geosLogLine: str) -> str:
    """Extract the name of a phase model from a Geos log line.

    Args:
        geosLogLine (str): expected line: " TableFunction:
            fluid_phaseModel1_PhillipsBrineDensity_table"

    Raises:
        ValueError: if no "phaseModel" token is found or nothing follows it.

    Returns:
        str: the phase model name, e.g. "PhillipsBrineDensity"
    """
    try:
        # Special characters become whitespace so that "phaseModel1" and the
        # model name split into separate tokens.
        tokens: list[str] = replaceSpecialCharactersWithWhitespace(geosLogLine).split()
        firstMatch: str = next(tok for tok in tokens if "phaseModel" in tok)
        nextIndex: int = tokens.index(firstMatch) + 1
        if nextIndex >= len(tokens):
            raise ValueError("Not enough elements to unpack in <<" + geosLogLine + ">>")
        return tokens[nextIndex]
    except Exception as e:
        raise ValueError(
            "An error has occured while parsing <<" + geosLogLine + ">>"
        ) from e
+ ValueError: "An error has occured while parsing <<" + geosLogLine + ">>" + + Returns: + list[str]: ["Reservoir__DeltaPressureMin", "Reservoir__DeltaPressureMax"] + """ + try: + lineBlocks: list[str] = geosLogLine.split(":") + if len(lineBlocks) == 3: + propertyLineBlock: str = lineBlocks[1] + propertiesName: list[str] = buildPropertiesNameFromGeosProperties( + propertyLineBlock, phasesName + ) + statsBlock: str = lineBlocks[0] + statsElements: list[str] = statsBlock.split() + if len(statsElements) >= 2: + regionName: str = statsElements[1] + formattedRegion: str = formatPropertyName(regionName) + formattedProps = [formatPropertyName(prop) for prop in propertiesName] + propertiesFlow: list[str] = [ + formattedRegion + "__" + prop for prop in formattedProps + ] + return propertiesFlow + else: + raise ValueError( + "Incorrect number of blocks in line <<" + + geosLogLine + + ">> for it to find property name." + ) + except Exception as e: + raise ValueError( + "An error has occured while parsing <<" + geosLogLine + ">>" + ) from e + return [] + + +def buildPropertiesNameFromGeosProperties( + geosProperties: str, phasesName: list[str] +) -> list[str]: + """Extracts the property name and its extensions like min, max, average. 
def buildPropertiesNameForPhases(nameBlock: str, phasesName: list[str]) -> list[str]:
    """Substitute each phase name for the phase placeholder in a name block.

    Args:
        nameBlock (str): " Mobile phase mass"
        phasesName (list[str]): ["CO2","Water"]

    Returns:
        list[str]: one name per phase, e.g.
            [" Mobile CO2 mass", " Mobile Water mass"]
    """
    # A lower-case " phase " (with surrounding spaces) wins; otherwise the
    # capitalized "Phase" placeholder is substituted.
    placeholder: str = "phase" if " phase " in nameBlock else "Phase"
    return [nameBlock.replace(placeholder, phase) for phase in phasesName]
def identifyProperties(properties: list[str]) -> list[str]:
    """Prefix each property name with the id of its meta property.

    Each name is matched (case-insensitively) against an ordered list of
    known property tags; the first tag contained in the name decides the id,
    which is prepended as "<id>:" to the original name.

    Args:
        properties (list[str]): ["CaprockPressureMax", "CaprockPressureMin"]

    Raises:
        ValueError: if a property matches none of the known tags.

    Returns:
        list[str]: ["1:CaprockPressureMax", "1:CaprockPressureMin"]
    """
    # The order of the tags is mandatory: more specific tags must be tested
    # before the shorter tags they contain (e.g. "deltapressure" before
    # "pressure", "meanbhp" before "bhp").
    propertiesIdentifiers: list[tuple[str, str]] = [
        ("deltapressure", "0"),
        ("pressure", "1"),
        ("temperature", "2"),
        ("totaldynamicporevolume", "3"),
        ("dynamicporevolumes", "4"),
        ("nontrapped", "5"),
        ("trapped", "6"),
        ("immobile", "7"),
        ("mobile", "8"),
        ("dissolved", "9"),
        ("meanbhp", "15"),
        ("meantotalmassrate", "16"),
        ("meantotalvolumetricrate", "17"),
        ("meansurfacevolumetricrate", "18"),
        ("totalmassrate", "12"),
        ("totalvolumetricrate", "13"),
        ("totalsurfacevolumetricrate", "13"),
        ("surfacevolumetricrate", "14"),
        ("totalfluidmass", "36"),
        ("cellfluidmass", "37"),
        ("mass", "10"),
        ("bhp", "11"),
        ("cumulatedvolumetricrate", "19"),
        ("cumulatedvolume", "20"),
        ("volumetricrate", "21"),
        ("volume", "22"),
        ("newtoniter", "23"),
        ("lineariter", "24"),
        ("stress", "25"),
        ("displacement", "26"),
        ("permeability", "27"),
        ("porosity", "28"),
        ("ratio", "29"),
        ("fraction", "30"),
        ("bulkmodulus", "31"),
        ("shearmodulus", "32"),
        ("oedometricmodulus", "33"),
        ("points", "34"),
        ("density", "35"),
        ("time", "38"),
        ("dt", "39"),
    ]
    idProps: list[str] = []
    for prop in properties:
        lowered: str = prop.lower()
        propId: Union[str, None] = next(
            (tag for keyword, tag in propertiesIdentifiers if keyword in lowered),
            None,
        )
        if propId is None:
            raise ValueError(
                f"The property <<{prop}>> could not be identified.\n"
                + "Check that your list of meta properties is updated."
            )
        idProps.append(propId + ":" + prop)
    return idProps
def identifyCurrentWell(geosLogLine: str, lastWellName: str) -> str:
    """Determine which well the current Geos log line refers to.

    Well property lines do not always carry the well name, so the name is
    taken from the line when a ":" separator is present and otherwise falls
    back to the last well name seen in former lines.

    Args:
        geosLogLine (str): line from Geos log file
            #expected lines with well name :
            "Rank 18: well.CO2001: BHP (at the specified reference
            elevation): 19318538.400682557 Pa"
            Or
            "wellControls1: BHP (at the specified reference
            elevation): 12337146.157562563 Pa"
            #line with no well name :
            "The total rate is 0 kg/s, which corresponds to a
            total surface volumetric rate of 0 sm3/s"
        lastWellName (str): name of the last well found

    Returns:
        str: the well name, e.g. "wellControls"
    """
    if ":" not in geosLogLine:
        return lastWellName.strip()
    parts: list[str] = geosLogLine.split(":")
    # "Rank N: well.X: ..." lines carry the name in the second field,
    # "wellControls1: ..." lines in the first.
    candidate: str = parts[1] if geosLogLine.startswith("Rank") else parts[0]
    return candidate.strip()
def extractPhaseId(geosLogLine: str) -> int:
    """Extract the phase number id from a Geos log line.

    Args:
        geosLogLine (str): expected line: "wellControls1:
            Phase 0 surface volumetric rate: 30.023748128796043 sm3/s"

    Raises:
        ValueError: if the line cannot be parsed or has no token after
            "phase".

    Returns:
        int: the phase id, e.g. 0
    """
    try:
        # Case-insensitive search: the id is the token right after "phase".
        tokens: list[str] = geosLogLine.lower().split()
        idIndex: int = tokens.index("phase") + 1
        if idIndex >= len(tokens):
            raise ValueError(
                "Not enough elements to unpack in region line <<" + geosLogLine + ">>"
            )
        return int(tokens[idIndex])
    except Exception as e:
        raise ValueError(
            "An error has occured while parsing region line <<" + geosLogLine + ">>"
        ) from e
+ + Args: + geosLogLine (str): line from geos log file. + + Returns: + list[str]: list of tags. + """ + if geosLogLine.startswith("Control switch"): + return [] + lower_geosLogLine = geosLogLine.lower() + tags_found_line: list[str] = [] + if "well is shut" in lower_geosLogLine: + tags_found_line.append("well is shut") + elif " bhp " in lower_geosLogLine: + tags_found_line.append("BHP") + elif "total rate" in lower_geosLogLine: + tags_found_line.append("total massRate") + if "total surface volumetric rate" in lower_geosLogLine: + tags_found_line.append("total surface volumetricRate") + elif "surface volumetric rate" in lower_geosLogLine: + tags_found_line.append("phase surface volumetricRate") + elif "density of phase" in lower_geosLogLine: + tags_found_line.append("density of phase") + elif "total fluid density" in lower_geosLogLine: + tags_found_line.append("total fluid density") + return tags_found_line + + +def extractValuesWell(geosLogLine: str, numberProperties: int) -> list[float]: + """Extract values from Geos log line and returns them as a list of floats. + + The idea here is first to extract all floats values from the line. + Now all of them are useful so we need to keep some of them. + Luckily, only the last one or two floats are useful. And to determine + that, we use the number of well properties found in the line which ranges + from one to two. + + Args: + geosLogLine (str): "Rank 129: well.CO2010: The density of + phase 0 at surface conditions is 1.86 kg/sm3." + numberProperties (int): number of well properties found in the line. + + Returns: + list[float]: value of the property. e.g. 
            [1.86]
    """
    try:
        if numberProperties > 0:
            valuesFound: list[float] = extractFloatsFromString(geosLogLine)
            if len(valuesFound) >= numberProperties:
                # Only the trailing floats are property values; earlier floats
                # in the line may come from rank ids or well names.
                usefulValues: list[float] = valuesFound[-numberProperties:]
                return usefulValues
            else:
                raise ValueError(
                    "Number of floats found in line is inferior to number of well properties"
                    + " in line <<"
                    + geosLogLine
                    + ">>."
                )
        else:
            raise ValueError(
                "No well property found in the well property line <<"
                + geosLogLine
                + ">>."
            )
    except Exception as e:
        # Any parsing failure is re-raised as a ValueError naming the line.
        raise ValueError(
            "Well line not corresponding to expected layering <<" + geosLogLine + ">>."
        ) from e


def extractValueAndNameAquifer(geosLogLine: str) -> tuple[str, float]:
    """Extract value and name of the aquifer contained in a Geos log line.

    Args:
        geosLogLine (str): "FlowSolverBase compositionalMultiphaseFlow
            (SimuDeck_aquifer_pression_meme.xml, l.28): at time 100s, the
            boundary condition 'aquifer1' produces a flux of
            -0.6181975187076816 kg (or moles if useMass=0)."

    Raises:
        ValueError: if the line does not match the expected layout.

    Returns:
        tuple[str, float]: a tuple with the name and the float value.
            e.g. ("aquifer1", -0.6181975187076816)
    """
    try:
        lineElements: list[str] = geosLogLine.split()
        # The aquifer name is the token right before "produces"; the flux
        # value is the second token after "flux".
        indexAquifName: int = lineElements.index("produces") - 1
        indexValue: int = lineElements.index("flux") + 2
        if 0 < indexAquifName < indexValue and indexValue < len(lineElements):
            aquifName: str = lineElements[indexAquifName].replace("'", "")
            value: float = float(lineElements[indexValue])
            return (aquifName, value)
        else:
            raise ValueError(
                "Aquifer name or aquifer property value is not given in the line <<"
                + geosLogLine
                + ">>."
            )
    except Exception as e:
        raise ValueError(
            "Aquifer line not corresponding to expected layering <<"
            + geosLogLine
            + ">>."
        ) from e


def correctZeroValuesInListOfValues(values: list[float]) -> list[float]:
    """Replace orphan 0 values of input list.

    If 0 values are found in a list of values, either replace them with the
    value found before in the list or keep it 0. We suppose that 2 zeros in a
    row correspond to a continuity of the values, hence keeping it 0.

    Args:
        values (list[float]): list of ints or floats

    Returns:
        list[float]: list of ints or floats
    """
    # Work on a copy so the input list is never mutated.
    valuesCorrected: list[float] = deepcopy(values)
    # First and last values are never corrected (no neighbour on one side).
    for i in range(1, len(values) - 1):
        valueChecked: float = values[i]
        if valueChecked == 0:
            valueBefore: float = values[i - 1]
            valueAfter: float = values[i + 1]
            # An isolated zero (at least one non-zero neighbour) is assumed
            # to be an artefact and inherits the previous value.
            if valueBefore != 0 or valueAfter != 0:
                valuesCorrected[i] = valueBefore
    return valuesCorrected


def extractTimeAndDt(geosLogLine: str) -> tuple[float, float]:
    """From a Geos log line, extracts the float values of Time and dt.

    Args:
        geosLogLine (str): #expected lines :
            "Time: {} years, {} days, {} hrs, {} min, {} s, dt: {} s, Cycle: {}"
            "Time: {:.2f} years, dt: {} s, Cycle: {}"
            "Time: {:.2f} days, dt: {} s, Cycle: {}"
            "Time: {:.2f} hrs, dt: {} s, Cycle: {}"
            "Time: {:.2f} min, dt: {} s, Cycle: {}"
            "Time: {:4.2e} s, dt: {} s, Cycle: {}"
            "Time: {}s, dt:{}s, Cycle: {}"

    Returns:
        tuple[float, float]: (time, dt) in seconds; (0.0, 0.0) when the line
            does not contain the "dt:" and "Cycle:" markers. Unknown time
            unit tags are reported on stdout and skipped (the KeyError is
            caught internally).
    """
    timeCounter: dict[str, float] = {"years": 0, "days": 0, "hrs": 0, "min": 0, "s": 0}
    timeTag: str = "Time:"
    try:
        indexDT: int = geosLogLine.index("dt:")
        cycleIndex: int = geosLogLine.index("Cycle:")
    except ValueError:
        print(
            "The log line does not have valid format :\n<<"
            + geosLogLine.rstrip()
            + ">>\nDefault value of 0.0 returned."
        )
        return (0.0, 0.0)
    timePart: str = geosLogLine[len(timeTag) : indexDT]
    # timePart = " {} years, {} days, {} hrs, {} min, {} s, "
    timePart = timePart.replace(" ", "")[:-1]
    # timePart = "{}years,{}days,{}hrs,{}min,{}s"
    timeElts: list[str] = timePart.split(",")
    # timeElts = ["{}years", "{}days", "{}hrs", "{}min", "{}s"]
    for elt in timeElts:
        # Split each element at its LAST digit: the left part is the numeric
        # value (scientific notation such as "1.00e+05" stays intact because
        # 'e', '+' and '.' are not digits but lie before the last digit),
        # the right part is the unit tag ("years", "days", ...).
        lastDigitIndex: int = 0
        for i, caracter in enumerate(elt):
            if caracter.isdigit():
                lastDigitIndex = i
        timeValue: float = float(elt[: lastDigitIndex + 1])
        timeFactor: str = elt[lastDigitIndex + 1 :]
        try:
            timeCounter[timeFactor] += float(timeValue)
        except KeyError:
            # Unknown unit tag: report it but do not abort the parsing.
            print(f"Cannot add time values for tag=<<{timeFactor}>>")
    totalTime: float = timeInSecond(timeCounter)

    dtPart: str = geosLogLine[indexDT:cycleIndex]
    # dtPart = "dt: {} s, "
    dtPart = dtPart.replace(" ", "")[3:-2]
    # dtPart = "{}"
    dt: float = float(dtPart)
    return (totalTime, dt)


def timeInSecond(timeCounter: dict[str, float]) -> float:
    """Calculates the time in s from a dict of different time quantities.

    Args:
        timeCounter (dict[str, float]): timeCounter:
            {"years": x0, "days": x0, "hrs": x0, "min": x0, "s": x0}

    Returns:
        float: Sum in seconds of all time quantities.
    """
    # A year is counted as a Julian year: 365.25 days of 86400 s.
    yearsToSeconds: float = timeCounter["years"] * 365.25 * 86400
    daysToSeconds: float = timeCounter["days"] * 86400
    hrsToSeconds: float = timeCounter["hrs"] * 3600
    minsToSeconds: float = timeCounter["min"] * 60
    s: float = timeCounter["s"]
    return yearsToSeconds + daysToSeconds + hrsToSeconds + minsToSeconds + s


def extractNewtonIter(geosLogLine: str) -> int:
    """From a Geos log line, extracts the int value of NewtonIter.

    Args:
        geosLogLine (str): #expected line :
            " Attempt: 0, ConfigurationIter: 0, NewtonIter: 0"

    Raises:
        ValueError: "Not enough elements to unpack in time line <<" + geosLogLine + ">>."
+ + ValueError: "An error has occured while parsing <<" + geosLogLine + ">>" + + Returns: + int: NewtonIter + """ + try: + lineClean: str = replaceSpecialCharactersWithWhitespace(geosLogLine) + lineElements: list[str] = lineClean.split() + newtonIterIndex: int = lineElements.index("NewtonIter") + if len(lineElements) > newtonIterIndex + 1: + newtonIter: str = lineElements[newtonIterIndex + 1] + return int(newtonIter) + else: + raise ValueError( + "Not enough elements to unpack in line <<" + geosLogLine + ">>." + ) + except Exception as e: + raise ValueError( + "An error has occured while parsing <<" + geosLogLine + ">>" + ) from e + + +def extractLinearIter(geosLogLine: str) -> int: + """From a Geos log line, extracts the int value of linear iterations. + + Args: + geosLogLine (str): #expected line : + " Linear Solver | Success | Iterations: 23 | Final Rel Res: + 5.96636e-05 | Make Restrictor Time: 0 | Compute Auu Time: 0 | + SC Filter Time: 0 | Setup Time: 1.5156 s | Solve Time: 0.041093 s" + + Raises: + ValueError: "Not enough elements to unpack in time line <<" + geosLogLine + ">>." + ValueError: "An error has occured while parsing <<" + geosLogLine + ">>" + + Returns: + int: 23 + """ + try: + lineClean: str = replaceSpecialCharactersWithWhitespace(geosLogLine) + lineElements: list[str] = lineClean.split() + iterIndex: int = lineElements.index("Iterations") + if len(lineElements) > iterIndex + 1: + linearIter: str = lineElements[iterIndex + 1] + return int(linearIter) + else: + raise ValueError( + "Not enough elements to unpack in line <<" + geosLogLine + ">>." + ) + except Exception as e: + raise ValueError( + "An error has occured while parsing <<" + geosLogLine + ">>" + ) from e + + +""" +String treatments functions +""" + + +def replaceSpecialCharactersWithWhitespace(sentence: str) -> str: + """Replace every special characters in a string with whitespaces. 
+ + Args: + sentence (str): Random string "hi '(_there(''&*$^,:;'" + + Returns: + str: "hi there " + """ + cleanSentence: str = re.sub("[^a-zA-Z0-9\n.+]", " ", sentence) + return cleanSentence + + +def formatPropertyName(propertyName: str) -> str: + """Clean the string by replacing special characters and removing spaces. + + Args: + propertyName (str): name;of:the property + + Returns: + str: NameOfTheProperty + """ + propertyClean: str = replaceSpecialCharactersWithWhitespace(propertyName) + propertyElements: list[str] = propertyClean.split() + capitalizedPropertyElements: list[str] = [ + elt[0].upper() + elt[1:] for elt in propertyElements + ] + formattedName: str = "" + for element in capitalizedPropertyElements: + formattedName += element + return formattedName + + +def extractFloatsFromString(line: str) -> list[float]: + """Extracts a list of float numbers from a string. + + Args: + line (str): A random string. + + Returns: + list[float]: [float1, ..., floatN] + """ + lineModified: str = deepcopy(line) + replacements: list[str] = ["[", "]", "{", "}"] + for replacement in replacements: + lineModified = lineModified.replace(replacement, " ") + elements: list[str] = lineModified.split() + floats: list[float] = [] + for elt in elements: + if isFloat(elt): + floats.append(float(elt)) + return floats + + +# from https://stackoverflow.com/a/20929881 +def isFloat(element: Any) -> bool: # noqa: ANN401 # disable Any error + """Check whether an element is float or not. + + Args: + element (Any): input number to test. + + Returns: + bool: True if the number is a float. + """ + if element is None: + return False + try: + float(element) + return True + except ValueError: + return False + + +def extractListIntsFromString(string: str) -> list[int]: + """Builds a list of int numbers from a string. + + Args: + string (str): A random string. 
+ + Returns: + list[int]: [int1, ..., intN] + """ + intsFound: list[int] = [] + cleanString: str = replaceSpecialCharactersWithWhitespace(string) + lineElements: list[str] = cleanString.split() + for elt in lineElements: + with contextlib.suppress(ValueError): + intsFound.append(int(elt)) + return intsFound + + +def extractFirstIntFromString(string: str) -> int: + """Extracts the first int value from a string. + + Args: + string (str): A random string. + + Returns: + int or None if no int was found. + """ + cleanString: str = replaceSpecialCharactersWithWhitespace(string) + lineElements: list[str] = cleanString.split() + for elt in lineElements: + try: + intFound: int = int(elt) + return intFound + except ValueError: + pass + raise ValueError("Line does not contain int value.") + + +def countNumberLines(filepath: str) -> int: + """Reads a file to find the number of lines within it. + + Args: + filepath (str): Path to the file. + + Returns: + int: Number of lines in file. + """ + with open(filepath) as file: + numberLines = len(file.readlines()) + return numberLines + + +def elementsAreInLog(filepath: str, elements: list[str]) -> bool: + """Indicates if input file contains element from input list of string. + + To do so, this reads a file and checks at every line if an + element was found within the line. If an element is found, it is not + checked again. The function returns True only when there is no more + element to check. + + Args: + filepath (str): Path to the file. + elements (list[str]): Every string that needs to be find + inside the file. + + Returns: + bool: + """ + assert len(elements) > 0 + with open(filepath) as file: + for line in file: + if len(elements) == 0: + return True + for element in elements: + if element in line: + indexElement: int = elements.index(element) + elements.pop(indexElement) + break + return False + + +def findNumberPhasesSimulation(filepath: str) -> int: + """Find the number of phases from Geos log file. 
+ + Geos logs do not have explicit message telling you how many phases + were used to perform the simulation, unlike regions, wells etc ... + Therefore, we need at least to identify the exact number of phases that + can be find in this Geos log file to extract correctly properties + regarding phase related data. + + Args: + filepath (str): Filepath to a Geos log file. + + Returns: + int: The number of phases found in the Geos log. + """ + numberLines: int = countNumberLines(filepath) + # arbitrary number of minimum lines to consider the log as readable + assert numberLines > 50 + with open(filepath) as geosFile: + line: str = geosFile.readline() + id_line: int = 1 + while not line.startswith("Time:") and id_line <= numberLines: + line = geosFile.readline() + id_line += 1 + if line.startswith("Adding Solver of type") and ( + "singlephase" in line.lower() + ): + return 1 + maxPhaseIdWell: int = -1 + while id_line <= numberLines: + line = geosFile.readline() + id_line += 1 + if "Phase mass" in line or "Phase dynamic" in line: + valuesFound: list[float] = extractValuesFlow(line) + return len(valuesFound) + lowLine: str = line.lower() + phaseTags: list[str] = [" phase ", " surface "] + if ( + all(tag in lowLine for tag in phaseTags) + and "phase surface" not in lowLine + ): + phaseIdWell: int = extractPhaseId(line) + if maxPhaseIdWell < phaseIdWell: + maxPhaseIdWell = phaseIdWell + else: + return maxPhaseIdWell + 1 + return 0 + + +def transformUserChoiceToListPhases(userChoice: Union[str, None]) -> list[str]: + """Get a list of phase name from the input string. + + When using GeosLogReader, the user can choose the names of the phases + to use. The wished format is to specify each name in the good, separated + by whitespaces or either commas. + + Args: + userChoice (str | None): Output from EnterPhaseNames string vector widget. 
+ + Returns: + list[str]: [phase0, phase1, ..., phaseN] + """ + if userChoice is None: + return [] + choice: str = deepcopy(userChoice) + # Regular expression pattern to match any symbol that is not + # alphanumeric, comma, or whitespace + pattern = r"[^\w ,]" + matches = re.findall(pattern, userChoice) + if bool(matches): + print( + "You cannot use symbols except for commas." + + " Please separate your phase names with whitespace" + + " or with commas." + ) + return [] + choiceClean: str = choice.replace(",", " ") + phaseNames: list[str] = choiceClean.split() + return phaseNames + + +def phaseNamesBuilder(numberPhases: int, phasesFromUser: list[str]) -> list[str]: + """Build phase names. + + When creating phase names, the user can or cannot have defined his + own phase names when reading the log. Therefore, whether phase names + were provided or not, if we have N phases found in the log, a list of + N phase names will be created, starting from phase0 up to phaseN. + + Args: + numberPhases (int): Number of phases found in the log file. + phasesFromUser (list[str]): names chosen by the user, can be more + or less than numberPhases. 
+ + Returns: + list[str]: [nameFromUser0, nameFromUser1, ..., phaseN-1, phaseN] + """ + phaseNames: list[str] = [] + size: int = len(phasesFromUser) + for i in range(numberPhases): + if i + 1 > size: + phaseNames.append("phase" + str(i)) + else: + phaseNames.append(phasesFromUser[i]) + return phaseNames diff --git a/geos-pv/src/geos_pv/py.typed b/geos-pv/src/geos_pv/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/geos-pv/src/geos_pv/utils/__init__.py b/geos-pv/src/geos_pv/utils/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/geos-pv/src/geos_pv/utils/checkboxFunction.py b/geos-pv/src/geos_pv/utils/checkboxFunction.py new file mode 100644 index 00000000..7fce5261 --- /dev/null +++ b/geos-pv/src/geos_pv/utils/checkboxFunction.py @@ -0,0 +1,22 @@ +# SPDX-License-Identifier: Apache-2.0 +# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. +# SPDX-FileContributor: Alexandre Benedicto +# ruff: noqa +# type: ignore +def createModifiedCallback(anobject): + """Helper for the creation and use of vtkDataArraySelection in ParaView. + + Args: + anobject: any object. + """ + import weakref + + weakref_obj = weakref.ref(anobject) + anobject = None + + def _markmodified(*args, **kwars): + o = weakref_obj() + if o is not None: + o.Modified() + + return _markmodified diff --git a/geos-pv/src/geos_pv/utils/paraviewTreatments.py b/geos-pv/src/geos_pv/utils/paraviewTreatments.py new file mode 100644 index 00000000..3c51a27c --- /dev/null +++ b/geos-pv/src/geos_pv/utils/paraviewTreatments.py @@ -0,0 +1,608 @@ +# SPDX-License-Identifier: Apache-2.0 +# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. 
+# SPDX-FileContributor: Alexandre Benedicto, Martin Lemay +# ruff: noqa: E402 # disable Module level import not at top of file +from enum import Enum +from typing import Any, Union + +import numpy as np +import numpy.typing as npt +import pandas as pd # type: ignore[import-untyped] +from paraview.modules.vtkPVVTKExtensionsMisc import ( # type: ignore[import-not-found] + vtkMergeBlocks, +) +from paraview.simple import ( # type: ignore[import-not-found] + FindSource, + GetActiveView, + GetAnimationScene, + GetDisplayProperties, + GetSources, + servermanager, +) +import vtkmodules.util.numpy_support as vnp +from vtkmodules.vtkCommonCore import ( + vtkDataArray, + vtkDataArraySelection, + vtkDoubleArray, + vtkPoints, +) +from vtkmodules.vtkCommonDataModel import ( + vtkCellData, + vtkCompositeDataSet, + vtkDataObject, + vtkMultiBlockDataSet, + vtkPolyData, + vtkTable, + vtkUnstructuredGrid, +) + +from geos_utils.GeosOutputsConstants import ( + ComponentNameEnum, + GeosMeshOutputsEnum, +) + +# valid sources for Python view configurator +# TODO: need to be consolidated +HARD_CODED_VALID_PVC_TYPE: set[str] = {"GeosLogReader", "RenameArrays"} + + +def vtkTableToDataframe(table: vtkTable) -> pd.DataFrame: + """From a vtkTable, creates and returns a pandas dataframe. + + Args: + table (vtkTable): vtkTable object. + + Returns: + pd.DataFrame: Pandas dataframe. + """ + data: list[dict[str, Any]] = [] + for rowIndex in range(table.GetNumberOfRows()): + rowData: dict[str, Any] = {} + for colIndex in range(table.GetNumberOfColumns()): + colName: str = table.GetColumnName(colIndex) + cellValue: Any = table.GetValue(rowIndex, colIndex) + # we have a vtkVariant value, we need a float + cellValueF: float = cellValue.ToFloat() + rowData[colName] = cellValueF + data.append(rowData) + df: pd.DataFrame = pd.DataFrame(data) + return df + + +def vtkPolyDataToPointsDataframe(polydata: vtkPolyData) -> pd.DataFrame: + """Creates a pandas dataframe containing points data from vtkPolyData. 
+ + Args: + polydata (vtkPolyData): vtkPolyData object. + + Returns: + pd.DataFrame: Pandas dataframe containing the points data. + """ + points: vtkPoints = polydata.GetPoints() + assert points is not None, "Points is undefined." + nbrPoints: int = points.GetNumberOfPoints() + data: dict[str, Any] = { + "Point ID": np.empty(nbrPoints), + "PointsX": np.empty(nbrPoints), + "PointsY": np.empty(nbrPoints), + "PointsZ": np.empty(nbrPoints), + } + for pointID in range(nbrPoints): + point: tuple[float, float, float] = points.GetPoint(pointID) + data["Point ID"][pointID] = pointID + data["PointsX"][pointID] = point[0] + data["PointsY"][pointID] = point[1] + data["PointsZ"][pointID] = point[2] + pointData = polydata.GetPointData() + nbrArrays: int = pointData.GetNumberOfArrays() + for i in range(nbrArrays): + arrayToUse = pointData.GetArray(i) + arrayName: str = pointData.GetArrayName(i) + subArrayNames: list[str] = findSubArrayNames(arrayToUse, arrayName) + # Collect the data for each sub array + for ind, name in enumerate(subArrayNames): + data[name] = np.empty(nbrPoints) + for k in range(nbrPoints): + # Every element of the tuple correspond to one distinct + # sub array so we only need one value at a time + value: float = arrayToUse.GetTuple(k)[ind] + data[name][k] = value + df: pd.DataFrame = pd.DataFrame(data).set_index("Point ID") + return df + + +def vtkUnstructuredGridCellsToDataframe(grid: vtkUnstructuredGrid) -> pd.DataFrame: + """Creates a pandas dataframe containing points data from vtkUnstructuredGrid. + + Args: + grid (vtkUnstructuredGrid): vtkUnstructuredGrid object. + + Returns: + pd.DataFrame: Pandas dataframe. 
+ """ + cellIdAttributeName = GeosMeshOutputsEnum.VTK_ORIGINAL_CELL_ID.attributeName + cellData = grid.GetCellData() + numberCells: int = grid.GetNumberOfCells() + data: dict[str, Any] = {} + for i in range(cellData.GetNumberOfArrays()): + arrayToUse = cellData.GetArray(i) + arrayName: str = cellData.GetArrayName(i) + subArrayNames: list[str] = findSubArrayNames(arrayToUse, arrayName) + # Collect the data for each sub array + for ind, name in enumerate(subArrayNames): + data[name] = np.empty(numberCells) + for k in range(numberCells): + # Every element of the tuple correspond to one distinct + # sub array so we only need one value at a time + value: float = arrayToUse.GetTuple(k)[ind] + data[name][k] = value + df: pd.DataFrame = pd.DataFrame(data).astype({cellIdAttributeName: int}) + + # set cell ids as index + + # df = df.astype({cellIdAttributeName: int}) + return df.set_index(cellIdAttributeName) + + +def vtkToDataframe(dataset: vtkDataObject) -> pd.DataFrame: + """Creates a dataframe containing points data from vtkTable or vtkPolyData. + + Args: + dataset (Any): dataset to convert if possible. + + Returns: + pd.DataFrame: if the dataset is in the right format. + """ + if isinstance(dataset, vtkTable): + return vtkTableToDataframe(dataset) + elif isinstance(dataset, vtkPolyData): + return vtkPolyDataToPointsDataframe(dataset) + elif isinstance(dataset, vtkUnstructuredGrid): + return vtkUnstructuredGridCellsToDataframe(dataset) + else: + raise AssertionError( + f"Invalid dataset format {type(dataset)}. " + + "Supported formats are: vtkTable, vtkpolyData and vtkUnstructuredGrid" + ) + + +def findSubArrayNames(vtkArray: vtkDataArray, arrayName: str) -> list[str]: + """Get sub array names from multi array attributes. + + Because arrays in ParaView can be of multiple dimensions, + it can be difficult to convert these arrays to numpy arrays. + Therefore, we can split the original array into multiple sub + one dimensional arrays. 
In that case, new sub names need to be + derived from the original array to be used. + + Args: + vtkArray (vtkDataArray): Array from vtk library. + arrayName (str): Name of the array. + + Returns: + list[str]: Sub array names from original array name. + """ + # The ordering of six elements can seem odd but is adapted to + # Geos output format of stress as : + # sigma11, sigma22, sigma33, sigma23, sigma13, sigma12 + sixComponents: tuple[str, str, str, str, str, str] = ComponentNameEnum.XYZ.value + nbrComponents: int = vtkArray.GetNumberOfComponents() + subArrayNames: list[str] = [] + if nbrComponents == 1: + subArrayNames.append(arrayName) + elif nbrComponents < 6: + for j in range(nbrComponents): + subArrayNames.append(arrayName + "_" + sixComponents[j]) + else: + for j in range(nbrComponents): + subArrayNames.append(arrayName + "_" + str(j)) + return subArrayNames + + +def getDataframesFromMultipleVTKSources( + sourceNames: set[str], commonColumn: str +) -> list[pd.DataFrame]: + """Creates the dataframe from each source if they have the commonColumn. + + Args: + sourceNames (set[str]): list of sources. + commonColumn (str): common column name. + + Returns: + list[pd.DataFrame]: output dataframe. + """ + # indexSource: int = commonColumn.rfind("__") + # commonColumnNoSource: str = commonColumn[:indexSource] + validDataframes: list[pd.DataFrame] = [] + for name in sourceNames: + source = FindSource(name) + assert source is not None, "Source is undefined." + dataset = servermanager.Fetch(source) + assert dataset is not None, "Dataset is undefined." + currentDF: pd.DataFrame = vtkToDataframe(dataset) + if commonColumn in currentDF.columns: + dfModified = currentDF.rename( + columns={ + col: col + "__" + name + for col in currentDF.columns + if col != commonColumn + } + ) + validDataframes.append(dfModified) + else: + print( + f"The source <<{name}>> could not be used" + + " to plot because the variable named <<" + + f"{commonColumn}>> could not be found." 
+ ) + return validDataframes + + +def mergeDataframes(dataframes: list[pd.DataFrame], commonColumn: str) -> pd.DataFrame: + """Merge all dataframes into a single one by using the common column. + + Args: + dataframes (list[pd.DataFrame]): List of dataframes from + getDataframesFromMultipleVTKSources. + commonColumn (str): Name of the only common column between + all of the dataframes. + + Returns: + pd.DataFrame: Merged dataframes into a single one by 'outer' + on the commonColumn. + """ + assert len(dataframes) > 0 + if len(dataframes) == 1: + return dataframes[0] + else: + df0: pd.DataFrame = dataframes[0] + df1: pd.DataFrame = dataframes[1] + merged: pd.DataFrame = df0.merge(df1, on=commonColumn, how="outer") + if len(dataframes) > 2: + for df in dataframes[2:]: + merged = merged.merge(df, on=commonColumn, how="outer") + return merged + + +def addDataframeColumnsToVtkPolyData( + polyData: vtkPolyData, df: pd.DataFrame +) -> vtkPolyData: + """Add columns from a dataframe to a vtkPolyData. + + Args: + polyData (vtkPolyData): vtkPolyData before modifcation. + df (pd.DataFrame): Pandas dataframe. + + Returns: + vtkPolyData: vtkPolyData with new arrays. + """ + for column_name in df.columns: + column = df[column_name].values + array = vtkDoubleArray() + array.SetName(column_name) + array.SetNumberOfValues(polyData.GetNumberOfPoints()) + for i in range(polyData.GetNumberOfPoints()): + array.SetValue(i, column[i]) + polyData.GetPointData().AddArray(array) + + # Update vtkPolyData object + polyData.GetPointData().Modified() + polyData.Modified() + return polyData + + +# Functions to help the processing of PythonViewConfigurator + + +def getPossibleSourceNames() -> set[str]: + """Get the list of valid source names for PythonViewConfigurator. + + In PythonViewConfigurator, multiple sources can be considered as + valid inputs. We want the user to know the names of every of these + sources that can be used to plot data. 
This function therefore identifies + which source names are valid to be used later as sources. + + Returns: + set[str]: Source names in the paraview pipeline. + """ + # get all sources different from PythonViewConfigurator + validNames: set[str] = set() + for k in GetSources(): + sourceName: str = k[0] + source = FindSource(sourceName) + if (source is not None) and ("PythonViewConfigurator" not in source.__str__()): + dataset = servermanager.Fetch(source) + if dataset.IsA("vtkPolyData") or dataset.IsA("vtkTable"): + validNames.add(sourceName) + return validNames + + +def usefulSourceNamesPipeline() -> set[str]: + """Get the list of valid pipelines for PythonViewConfigurator. + + When using the PythonViewConfigurator, we want to check if the sources + in the ParaView pipeline are compatible with what the filter can take as + input. So this function scans every sources of the pipeline and if it + corresponds to one of the hardcoded valid types, we keep the name. + They are right now : ["GeosLogReader", "RenameArrays"] + + Returns: + set[str]: [sourceName1, ..., sourceNameN] + """ + usefulSourceNames: set[str] = set() + allSourceNames: set[str] = {n[0] for n, s in GetSources().items()} + for name in allSourceNames: + source = FindSource(name) + if type(source).__name__ in HARD_CODED_VALID_PVC_TYPE: + usefulSourceNames.add(name) + return usefulSourceNames + + +def getDatasFromSources(sourceNames: set[str]) -> dict[str, pd.DataFrame]: + """Get the data from input sources. + + Args: + sourceNames (set[str]): [sourceName1, ..., sourceNameN] + + Returns: + dict[[str, pd.DataFrame]]: dictionary where source names are keys and + dataframe are values. + { sourceName1: servermanager.Fetch(FindSource(sourceName1)), + ... 
+ sourceNameN: servermanager.Fetch(FindSource(sourceNameN)) } + """ + usefulDatas: dict[str, Any] = {} + for name in sourceNames: + dataset = servermanager.Fetch(FindSource(name)) + usefulDatas[name] = dataset + return usefulDatas + + +def usefulVisibleDatasPipeline() -> dict[str, Any]: + """Get the list of visible pipelines. + + When using the PythonViewConfigurator, we want to collect the data of + each source that is visible in the paraview pipeline and that is + compatible as input data for the filter. Therefore, only certain types of + sources will be considered as valid. They are right now : + ["GeosLogReader", "RenameArrays"] + + Finally, if the sources are visible and valid, we access their data and + return the names of the source and their respective data. + + Returns: + dict[str, 'vtkInformation']: dictionary of source names and data from + pipeline. + { sourceName1: servermanager.Fetch(FindSource(sourceName1)), + ... + sourceNameN: servermanager.Fetch(FindSource(sourceNameN)) } + """ + usefulDatas: dict[str, Any] = {} + sourceNamesVisible: set[str] = set() + for n, s in GetSources().items(): + if servermanager.GetRepresentation(s, GetActiveView()) is not None: + displayProperties = GetDisplayProperties(s, view=GetActiveView()) + if (displayProperties is not None) and (displayProperties.Visibility == 1): + sourceNamesVisible.add(n[0]) + + for name in sourceNamesVisible: + source = FindSource(name) + if type(source).__name__ in HARD_CODED_VALID_PVC_TYPE: + usefulDatas[name] = servermanager.Fetch(FindSource(name)) + return usefulDatas + + +def isFilter(sourceName: str) -> bool: + """Identify if a source name can link to a filter in the ParaView pipeline. + + Args: + sourceName (str): name of a source object in the pipeline + + Returns: + bool: True if filter, False instead. 
+ """ + source: Any = FindSource(sourceName) + if source is None: + print(f"sourceName <<{sourceName}>> does not exist in the pipeline") + return False + else: + try: + test: Any = source.GetClientSideObject().GetInputAlgorithm() # noqa: F841 + return True + except Exception: + return False + + +def getFilterInput(sourceName: str) -> vtkDataObject: + """Access the vtk dataset that is used as input for a filter. + + Args: + sourceName (str): name of a source object in the pipeline. + + Returns: + Any: The vtk dataset that serves as input for the filter. + """ + filtre = FindSource(sourceName) + assert filtre is not None, "Source is undefined." + clientSideObject = filtre.GetClientSideObject() + assert clientSideObject is not None, "Client Side Object is undefined." + inputAlgo = clientSideObject.GetInputAlgorithm() + assert inputAlgo is not None, "Input Algorithm is undefined." + inputValues = inputAlgo.GetInput() + if isinstance(inputValues, vtkDataObject): + return inputValues + return vtkDataObject() + + +def getArrayChoices(array: vtkDataArraySelection) -> list[str]: + """Extracts the column names of input array when they are enabled. + + Args: + array (vtkDataArraySelection): input data + + Returns: + set[str]: [columnName1, ..., columnNameN] + """ + checkedColumns: list[str] = [] + for i in range(array.GetNumberOfArrays()): + columnName: str = array.GetArrayName(i) + if array.ArrayIsEnabled(columnName): + checkedColumns.append(columnName) + return checkedColumns + + +def integrateSourceNames(sourceNames: set[str], arrayChoices: set[str]) -> set[str]: + """Aggregate source and arrayChoices names. + + When creating the user choices in PythonViewConfigurator, you need + to take into account both the source names and the choices of curves + to have user choices corresponding to the column names of the dataframe + with the data to be plot. + + Args: + sourceNames (set[str]): Name of sources found in ParaView pipeline. 
+ arrayChoices (set[str]): Column names of the vtkdataarrayselection. + + Returns: + set[str]: [sourceName1__choice1, sourceName1__choice2, + ..., sourceNameN__choiceN] + """ + completeNames: set[str] = set() + for sourceName in sourceNames: + for choice in arrayChoices: + completeName: str = choice + "__" + sourceName + completeNames.add(completeName) + return completeNames + + +def getVtkOriginalCellIds( + mesh: Union[vtkMultiBlockDataSet, vtkCompositeDataSet, vtkDataObject] +) -> list[str]: + """Get vtkOriginalCellIds from a vtkUnstructuredGrid object. + + Args: + mesh (vtkMultiBlockDataSet|vtkCompositeDataSet|vtkDataObject): input mesh. + + Returns: + list[str]: ids of the cells. + """ + # merge blocks for vtkCompositeDataSet + mesh2: vtkUnstructuredGrid = mergeFilterPV(mesh) + attributeName: str = GeosMeshOutputsEnum.VTK_ORIGINAL_CELL_ID.attributeName + data: vtkCellData = mesh2.GetCellData() + assert data is not None, "Cell Data are undefined." + assert bool(data.HasArray(attributeName)), f"Attribute {attributeName} is not in the mesh" + + array: vtkDoubleArray = data.GetArray(attributeName) + nparray: npt.NDArray[np.float64] = vnp.vtk_to_numpy(array) # type: ignore[no-untyped-call] + return [str(int(ide)) for ide in nparray] + + +def strEnumToEnumerationDomainXml(enumObj: Enum) -> str: + """Creates an enumeration domain from an Enum objec. + + Creates an enumeration domain from an Enum objec + for the dropdown widgets of paraview plugin. + + Args: + enumObj (Enum): Enumeration values to put in the dropdown widget. + + Returns: + str: the XML string. + """ + xml: str = """""" + for i, unitObj in enumerate(list(enumObj)): # type: ignore[call-overload] + xml += f"""""" + xml += """""" + return xml + + +def strListToEnumerationDomainXml(properties: Union[list[str], set[str]]) -> str: + """Creates an enumeration domain from a list of strings. + + Creates an enumeration domain from a list of strings + for the dropdown widgets of paraview plugin. 
+ + Args: + properties (set[str] | list[str]): Properties to put in the dropdown widget. + + Returns: + str: the XML string. + """ + xml: str = """""" + for i, prop in enumerate(list(properties)): + xml += f"""""" + xml += """""" + return xml + + +def dataframeForEachTimestep(sourceName: str) -> dict[str, pd.DataFrame]: + """Get the data from source at each time step. + + In ParaView, a source object can contain data for multiple + timesteps. If so, knowing the source name, we can access its data + for each timestep and store it in a dict where the keys are the + timesteps and the values the data at each one of them. + + Args: + sourceName (str): Name of the source in ParaView pipeline. + + Returns: + dict[str, pd.DataFrame]: dictionary where time is the key and dataframe + is the value. + """ + animationScene = GetAnimationScene() + assert animationScene is not None, "animationScene is undefined." + # we set the animation to the initial timestep + animationScene.GoToFirst() + source = FindSource(sourceName) + dataset: vtkDataObject = servermanager.Fetch(source) + assert dataset is not None, f"Dataset is undefined." + dataset2: vtkUnstructuredGrid = mergeFilterPV(dataset) + time: str = str(animationScene.TimeKeeper.Time) + dfPerTimestep: dict[str, pd.DataFrame] = {time: vtkToDataframe(dataset2)} + # then we iterate on the other timesteps of the source + for _ in range(animationScene.NumberOfFrames): # type: ignore + animationScene.GoToNext() + source = FindSource(sourceName) + dataset = servermanager.Fetch(source) + dataset2 = mergeFilterPV(dataset) + time = str(animationScene.TimeKeeper.Time) + dfPerTimestep[time] = vtkToDataframe(dataset2) + return dfPerTimestep + + +def getTimeStepIndex(time: float, timeSteps: npt.NDArray[np.float64]) -> int: + """Get the time step index of input time from the list of time steps. 
+ + Args: + time (float): time + timeSteps (npt.NDArray[np.float64]): Array of time steps + + Returns: + int: time step index + """ + indexes: npt.NDArray[np.int64] = np.where(np.isclose(timeSteps, time))[0] + assert ( + indexes.size > 0 + ), f"Current time {time} does not exist in the selected object." + return int(indexes[0]) + + +def mergeFilterPV( + input: vtkDataObject, +) -> vtkUnstructuredGrid: + """Apply Paraview merge block filter. + + Args: + input (vtkMultiBlockDataSet | vtkCompositeDataSet | vtkDataObject): composite + object to merge blocks + + Returns: + vtkUnstructuredGrid: merged block object + + """ + mergeFilter: vtkMergeBlocks = vtkMergeBlocks() + mergeFilter.SetInputData(input) + mergeFilter.Update() + return mergeFilter.GetOutputDataObject(0) From a5a98197be0376815a1579e4ba9e0aa1b7cfbcba Mon Sep 17 00:00:00 2001 From: Martin LEMAY Date: Wed, 26 Mar 2025 17:52:10 +0100 Subject: [PATCH 02/20] Move PV tools and delete PVPythonViewConfigurator --- geos-posp/src/PVplugins/PVGeosLogReader.py | 624 -------- .../src/PVplugins/PVPythonViewConfigurator.py | 929 ----------- .../visu/PVUtils/checkboxFunction.py | 22 - .../visu/PVUtils/paraviewTreatments.py | 611 ------- .../visu/pythonViewUtils/Figure2DGenerator.py | 146 -- .../functionsFigure2DGenerator.py | 1424 ----------------- .../visu/pythonViewUtils/mainPythonView.py | 40 - .../geos_pv/geosLogReaderUtils}/__init__.py | 0 .../src/geos_pv/pyplotUtils}/__init__.py | 0 .../geos_pv/pyplotUtils}/matplotlibOptions.py | 0 .../utils}/DisplayOrganizationParaview.py | 0 11 files changed, 3796 deletions(-) delete mode 100644 geos-posp/src/PVplugins/PVGeosLogReader.py delete mode 100644 geos-posp/src/PVplugins/PVPythonViewConfigurator.py delete mode 100644 geos-posp/src/geos_posp/visu/PVUtils/checkboxFunction.py delete mode 100644 geos-posp/src/geos_posp/visu/PVUtils/paraviewTreatments.py delete mode 100644 geos-posp/src/geos_posp/visu/pythonViewUtils/Figure2DGenerator.py delete mode 100644 
geos-posp/src/geos_posp/visu/pythonViewUtils/functionsFigure2DGenerator.py delete mode 100644 geos-posp/src/geos_posp/visu/pythonViewUtils/mainPythonView.py rename {geos-posp/src/geos_posp/visu/PVUtils => geos-pv/src/geos_pv/geosLogReaderUtils}/__init__.py (100%) rename {geos-posp/src/geos_posp/visu/pythonViewUtils => geos-pv/src/geos_pv/pyplotUtils}/__init__.py (100%) rename {geos-posp/src/geos_posp/visu/PVUtils => geos-pv/src/geos_pv/pyplotUtils}/matplotlibOptions.py (100%) rename {geos-posp/src/geos_posp/visu/PVUtils => geos-pv/src/geos_pv/utils}/DisplayOrganizationParaview.py (100%) diff --git a/geos-posp/src/PVplugins/PVGeosLogReader.py b/geos-posp/src/PVplugins/PVGeosLogReader.py deleted file mode 100644 index b35cb1c6..00000000 --- a/geos-posp/src/PVplugins/PVGeosLogReader.py +++ /dev/null @@ -1,624 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. -# SPDX-FileContributor: Alexandre Benedicto -# ruff: noqa: E402 # disable Module level import not at top of file -import os -import sys -from enum import Enum -from typing import Union, cast - -import numpy as np -import numpy.typing as npt -import pandas as pd # type: ignore[import-untyped] -from typing_extensions import Self - -dir_path = os.path.dirname(os.path.realpath(__file__)) -parent_dir_path = os.path.dirname(dir_path) -if parent_dir_path not in sys.path: - sys.path.append(parent_dir_path) - -import vtkmodules.util.numpy_support as vnp -from paraview.util.vtkAlgorithm import ( # type: ignore[import-not-found] - VTKPythonAlgorithmBase, - smdomain, - smhint, - smproperty, - smproxy, -) -from vtk import VTK_DOUBLE # type: ignore[import-untyped] -from vtkmodules.vtkCommonCore import vtkDataArraySelection as vtkDAS -from vtkmodules.vtkCommonCore import ( - vtkDoubleArray, - vtkInformation, - vtkInformationVector, -) -from vtkmodules.vtkCommonDataModel import vtkTable - -from geos_posp.processing.geosLogReaderFunctions import ( - identifyProperties, - 
transformUserChoiceToListPhases, -) -from geos_posp.readers.GeosLogReaderAquifers import GeosLogReaderAquifers -from geos_posp.readers.GeosLogReaderConvergence import GeosLogReaderConvergence -from geos_posp.readers.GeosLogReaderFlow import GeosLogReaderFlow -from geos_posp.readers.GeosLogReaderWells import GeosLogReaderWells -from geos_posp.utils.enumUnits import ( - Mass, - MassRate, - Pressure, - Time, - Unit, - Volume, - VolumetricRate, - enumerationDomainUnit, -) -from geos_posp.utils.UnitRepository import UnitRepository -from geos_posp.visu.PVUtils.checkboxFunction import ( # type: ignore[attr-defined] - createModifiedCallback, -) -from geos_posp.visu.PVUtils.paraviewTreatments import ( - strListToEnumerationDomainXml, -) - -__doc__ = """ -GeosLogRePVGeosLogReaderader is a Paraview plugin that allows to read Geos output log. - -Input is a file and output is a vtkTable containing log information. - -..WARNING:: - - The reader is compliant with GEOS log before commit version #9365098. - For more recent version, use the csv or hdf5 export options from GEOS. - -To use it: - -* Load the module in Paraview: Tools>Manage Plugins...>Load new>PVGeosLogReader. -* Open (File>Open...) and Select Geos output log .out/.txt file. -* In the "Open data with..." window, Select PVGeosLogReader reader. - -""" - - -@smproxy.reader( - name="PVGeosLogReader", - label="Geos Log Reader", - extensions=["txt", "out"], - file_description="txt and out files of GEOS log files", -) -class PVGeosLogReader(VTKPythonAlgorithmBase): - def __init__(self: Self) -> None: - """Paraview reader for Geos log files ."txt" or ".out". - - Output is a vtkTable with data extracted from the log. 
- """ - super().__init__(nInputPorts=0, nOutputPorts=1, outputType="vtkTable") - self.m_filepath: str = "" - self.m_phasesUserChoice: list[str] = [] - self.m_dataframeChoice: int = 0 - self.m_dataframe: pd.DataFrame - self.m_numberWellsMean: int = 1 - - # checkboxes values - self.m_useSIUnits: int = 0 - self.m_pressureUnit: int = 0 - self.m_bhpUnit: int = 0 - self.m_stressUnit: int = 0 - self.m_timeUnit: int = 0 - self.m_massUnit: int = 0 - self.m_volumeUnit: int = 0 - self.m_volumetricRateUnit: int = 0 - self.m_massRateUnit: int = 0 - self.m_densityUnit: int = 0 - - # for selection of properties - self.m_propertiesFlow: vtkDAS = vtkDAS() - self.m_propertiesFlow.AddObserver("ModifiedEvent", createModifiedCallback(self)) # type: ignore[arg-type] - propsFlow: list[str] = [ - "DeltaPressure", - "Pressure", - "Temperature", - "TotalDynamicPoreVolume", - "DynamicPoreVolumes", - "NonTrapped", - "Trapped", - "Immobile", - "Mobile", - "Dissolved", - "TotalFluidMass", - "CellFluidMass", - ] - for prop in propsFlow: - self.m_propertiesFlow.AddArray(prop) - - self.m_propertiesWells: vtkDAS = vtkDAS() - self.m_propertiesWells.AddObserver("ModifiedEvent", createModifiedCallback(self)) # type: ignore[arg-type] - propsWells: list[str] = [ - "MeanBHP", - "MeanTotalMassRate", - "MeanTotalVolumetricRate", - "MeanSurfaceVolumetricRate", - "TotalMassRate", - "TotalVolumetricRate", - "SurfaceVolumetricRate", - "Mass", - "BHP", - ] - for prop in propsWells: - self.m_propertiesWells.AddArray(prop) - - self.m_propertiesAquifers: vtkDAS = vtkDAS() - self.m_propertiesAquifers.AddObserver("ModifiedEvent", createModifiedCallback(self)) # type: ignore[arg-type] - propsAquifers: list[str] = [ - "Volume", - "VolumetricRate", - "CumulatedVolume", - "CumulatedVolumetricRate", - ] - for prop in propsAquifers: - self.m_propertiesAquifers.AddArray(prop) - - self.m_convergence: vtkDAS = vtkDAS() - self.m_convergence.AddObserver("ModifiedEvent", createModifiedCallback(self)) # type: ignore[arg-type] - 
propsSolvers: list[str] = ["NewtonIter", "LinearIter"] - for prop in propsSolvers: - self.m_convergence.AddArray(prop) - - @smproperty.stringvector( - name="DataFilepath", default_values="Enter a filepath to your data" - ) - @smdomain.filelist() - @smhint.filechooser(extensions=["txt", "out"], file_description="Data files") - def a01SetFilepath(self: Self, filepath: str) -> None: - """Set Geos log file path. - - Args: - filepath (str): path to the file. - - Raises: - FileNotFoundError: file not found. - """ - if filepath != "Enter a filepath to your data": - if not os.path.exists(filepath): - raise FileNotFoundError(f"Invalid filepath {filepath}") - else: - self.m_filepath = filepath - self.Modified() - - def getFilepath(self: Self) -> str: - """Get Geos log file path. - - Returns: - str: filepath. - """ - return self.m_filepath - - @smproperty.stringvector( - name="EnterPhaseNames", label="Enter Phase Names", default_values="" - ) - @smdomain.xml( - """ - Please enter your phase names as phase0, phase1, phase2. - """ - ) - def a02SetPhaseNames(self: Self, value: str) -> None: - """Set phase names. - - Args: - value (str): list of phase names seprated by space. - """ - self.m_phasesUserChoice = transformUserChoiceToListPhases(value) - self.Modified() - - def getPhasesUserChoice(self: Self) -> list[str]: - """Access the phases from the user input. - - Returns: - list[str]: phase names. - """ - return self.m_phasesUserChoice - - @smproperty.intvector( - name="DataframeChoice", - number_of_elements=1, - label="DataframeChoice", - default_values=0, - ) - @smdomain.xml( - strListToEnumerationDomainXml(["Flow", "Wells", "Aquifers", "Convergence"]) - ) - def a03SetDataFrameChoice(self: Self, value: int) -> None: - """Set reader choice: 0:Flow, 1:Wells, 2:Aquifers, 3:Convergence. - - Args: - value (int): user choice. - """ - self.m_dataframeChoice = value - self.Modified() - - def getDataframeChoice(self: Self) -> int: - """Accesses the choice of dataframe from the user. 
- - Returns: - int: The value corresponding to a certain dataframe. - "Flow" has value "0", "Wells" has value "1", - "Aquifers" has value "2", "Convergence" has - value "3". - """ - return self.m_dataframeChoice - - @smproperty.xml( - """ - - - - """ - ) - def a04PropertyGroup(self: Self) -> None: - """Organized group.""" - self.Modified() - - @smproperty.dataarrayselection(name="FlowProperties") - def a05SetPropertiesFlow(self: Self) -> vtkDAS: - """Use Flow.""" - return self.m_propertiesFlow - - @smproperty.xml( - """ - - - """ - ) - def a06GroupFlow(self: Self) -> None: - """Organized group.""" - self.Modified() - - @smproperty.dataarrayselection(name="WellsProperties") - def a07SetPropertiesWells(self: Self) -> vtkDAS: - """Use wells.""" - return self.m_propertiesWells - - @smproperty.intvector(name="NumberOfWellsForMeanCalculation", default_values=1) - def a08SetTheNumberOfWellsMean(self: Self, number: int) -> None: - """Set number of wells. - - Args: - number (int): number of wells. - """ - self.m_numberWellsMean = number - self.Modified() - - def getNumberOfWellsMean(self: Self) -> int: - """Get the number of wells. - - Returns: - int: The number of wells to consider. 
- """ - return self.m_numberWellsMean - - @smproperty.xml( - """ - - - - """ - ) - def a09GroupWells(self: Self) -> None: - """Organized group.""" - self.Modified() - - @smproperty.dataarrayselection(name="AquifersProperties") - def a10SetPropertiesAquifers(self: Self) -> vtkDAS: - """Use aquifers.""" - return self.m_propertiesAquifers - - @smproperty.xml( - """ - - - """ - ) - def a11GroupAquifers(self: Self) -> None: - """Organized group.""" - self.Modified() - - @smproperty.dataarrayselection(name="Convergence") - def a12SetConvergence(self: Self) -> vtkDAS: - """Use convergence.""" - return self.m_convergence - - @smproperty.xml( - """ - - - """ - ) - def a13GroupSolvers(self: Self) -> None: - """Organized group.""" - self.Modified() - - def getIdsToUse(self: Self) -> list[str]: - """Get property ids. - - Using the checkbox choices of the user for metaproperties, - we get the list of ids to map the dataframe properties with the - properties. - - Returns: - list(str): Ids of the metaproperties. - """ - dataArrays: dict[int, vtkDAS] = { - 0: self.m_propertiesFlow, - 1: self.m_propertiesWells, - 2: self.m_propertiesAquifers, - 3: self.m_convergence, - } - dataArrayToUse = dataArrays[self.getDataframeChoice()] - propertyNames: list[str] = [] - for i in range(dataArrayToUse.GetNumberOfArrays()): - propName: str = dataArrayToUse.GetArrayName(i) - if dataArrayToUse.ArrayIsEnabled(propName) == 1: - propertyNames.append(propName) - propertiesWithId: list[str] = identifyProperties(propertyNames) - onlyIds: list[str] = [] - for propId in propertiesWithId: - idFound: str = propId.split(":")[0] - onlyIds.append(idFound) - return onlyIds - - @smproperty.intvector(name="UseSIUnits", label="UseSIUnits", default_values=1) - @smdomain.xml("""""") - def b01SetUseSIUnits(self: Self, value: int) -> None: - """Set Use SI Units. - - Args: - value (int): user choice. 
- """ - self.m_useSIUnits = value - self.Modified() - - @smproperty.intvector( - name="Pressure", label="Pressure", default_values=0, panel_visibility="default" - ) - @smdomain.xml(enumerationDomainUnit(cast(Enum, Pressure))) - def b02SetPressureUnit(self: Self, value: int) -> None: - """Set pressure unit. - - Args: - value (int): user choice. - """ - self.m_pressureUnit = value - self.Modified() - - @smproperty.intvector( - name="BHP", label="BHP", default_values=0, panel_visibility="default" - ) - @smdomain.xml(enumerationDomainUnit(cast(Enum, Pressure))) - def b03SetBHPUnit(self: Self, value: int) -> None: - """Set BHP unit. - - Args: - value (int): user choice. - """ - self.m_bhpUnit = value - self.Modified() - - @smproperty.intvector( - name="Time", label="Time", default_values=0, panel_visibility="default" - ) - @smdomain.xml(enumerationDomainUnit(cast(Enum, Time))) - def b04SetTimeUnit(self: Self, value: int) -> None: - """Set time unit. - - Args: - value (int): user choice. - """ - self.m_timeUnit = value - self.Modified() - - @smproperty.intvector( - name="Mass", label="Mass", default_values=0, panel_visibility="default" - ) - @smdomain.xml(enumerationDomainUnit(cast(Enum, Mass))) - def b05SetMassUnit(self: Self, value: int) -> None: - """Set mass unit. - - Args: - value (int): user choice. - """ - self.m_massUnit = value - self.Modified() - - @smproperty.intvector( - name="Volume", label="Volume", default_values=0, panel_visibility="default" - ) - @smdomain.xml(enumerationDomainUnit(cast(Enum, Volume))) - def b06SetVolumeUnit(self: Self, value: int) -> None: - """Set volume unit. - - Args: - value (int): user choice. 
- """ - self.m_volumeUnit = value - self.Modified() - - @smproperty.intvector( - name="VolumetricRate", - label="VolumetricRate", - default_values=0, - panel_visibility="default", - ) - @smdomain.xml(enumerationDomainUnit(cast(Enum, VolumetricRate))) - def b07SetVolumetricRateUnit(self: Self, value: int) -> None: - """Set volumetric rate unit. - - Args: - value (int): user choice. - """ - self.m_volumetricRateUnit = value - self.Modified() - - @smproperty.intvector( - name="MassRate", label="MassRate", default_values=0, panel_visibility="default" - ) - @smdomain.xml(enumerationDomainUnit(cast(Enum, MassRate))) - def b08SetMassRateUnit(self: Self, value: int) -> None: - """Set Mass rate unit. - - Args: - value (int): user choice. - """ - """""" - self.m_massRateUnit = value - self.Modified() - - @smproperty.xml( - """ - - - - - - - - - """ - ) - def b09GroupUnitsToUse(self: Self) -> None: - """Organize group.""" - self.Modified() - - def getUseSIUnits(self: Self) -> int: - """Acess the choice to use SI units or not. - - Returns: - int: 0 to not use SI units or 1 to use SI units. - """ - return self.m_useSIUnits - - def getUnitChoices(self: Self) -> dict[str, int]: - """Get the units choosen by the user. - - Based on the choice of using SI units or not, and if - not with the units chosen by the user, returns a dict - with metaproperties such as pressure, volume etc ... - with the unit associated. - - Returns: - dict[str, int]: empty dictionary if use SI unit, or - property name as keys and unit choice as values. 
- """ - unitChoices: dict[str, int] = {} - if not self.getUseSIUnits(): - unitChoices = { - "pressure": self.m_pressureUnit, - "stress": self.m_stressUnit, - "bhp": self.m_bhpUnit, - "mass": self.m_massUnit, - "massRate": self.m_massRateUnit, - "time": self.m_timeUnit, - "volume": self.m_volumeUnit, - "volumetricRate": self.m_volumetricRateUnit, - "density": self.m_densityUnit, - } - return unitChoices - - def createDataframe(self: Self) -> pd.DataFrame: - """Create dataframe with values from Geos log based on user choices. - - Returns: - pd.DataFrame: Dataframe with log values according to user choice. - """ - filepath: str = self.getFilepath() - phaseNames: list[str] = self.getPhasesUserChoice() - choice: int = self.getDataframeChoice() - userPropertiesUnits: dict[str, int] = self.getUnitChoices() - unitObj: UnitRepository = UnitRepository(userPropertiesUnits) - propertiesUnit: dict[str, Unit] = unitObj.getPropertiesUnit() - reader: Union[ - GeosLogReaderFlow, - GeosLogReaderWells, - GeosLogReaderAquifers, - GeosLogReaderConvergence, - ] - if choice == 0: - reader = GeosLogReaderFlow(filepath, propertiesUnit, phaseNames) - elif choice == 1: - nbrWells: int = self.getNumberOfWellsMean() - reader = GeosLogReaderWells(filepath, propertiesUnit, phaseNames, nbrWells) - elif choice == 2: - reader = GeosLogReaderAquifers(filepath, propertiesUnit) - elif choice == 3: - reader = GeosLogReaderConvergence(filepath, propertiesUnit) - return reader.createDataframe() - - def RequestInformation( - self: Self, - request: vtkInformation, # noqa: F841 - inInfoVec: list[vtkInformationVector], # noqa: F841 - outInfoVec: vtkInformationVector, - ) -> int: - """Inherited from VTKPythonAlgorithmBase::RequestInformation. - - Args: - request (vtkInformation): request - inInfoVec (list[vtkInformationVector]): input objects - outInfoVec (vtkInformationVector): output objects - - Returns: - int: 1 if calculation successfully ended, 0 otherwise. 
- """ - executive = self.GetExecutive() - outInfo = outInfoVec.GetInformationObject(0) - outInfo.Remove(executive.TIME_STEPS()) - outInfo.Remove(executive.TIME_RANGE()) - return 1 - - def RequestData( - self: Self, - request: vtkInformation, # noqa: F841 - inInfoVec: list[vtkInformationVector], # noqa: F841 - outInfoVec: vtkInformationVector, - ) -> int: - """Inherited from VTKPythonAlgorithmBase::RequestData. - - Args: - request (vtkInformation): request - inInfoVec (list[vtkInformationVector]): input objects - outInfoVec (vtkInformationVector): output objects - - Returns: - int: 1 if calculation successfully ended, 0 otherwise. - """ - try: - # we choose which dataframe to build and get it - idsToUse = self.getIdsToUse() - dataframe = self.createDataframe() - usefulColumns = [] - for column_name in list(dataframe.columns): - if ":" not in column_name: - usefulColumns.append(column_name) - else: - idFound = column_name.split(":")[0] - if idFound in idsToUse: - usefulColumns.append(column_name) - # we build the output vtkTable - output: vtkTable = vtkTable.GetData(outInfoVec, 0) - for column in usefulColumns: - pandas_series: pd.Series = dataframe[column] - array: npt.NDArray[np.float64] = pandas_series.values - if ":" in column: - column = column.split(":")[1] - - newAttr: vtkDoubleArray = vnp.numpy_to_vtk(array, deep=True, array_type=VTK_DOUBLE) # type: ignore[no-untyped-call] - newAttr.SetName(column) - output.AddColumn(newAttr) - except Exception as e: - print("Error while reading Geos log file:") - print(str(e)) - return 0 - return 1 diff --git a/geos-posp/src/PVplugins/PVPythonViewConfigurator.py b/geos-posp/src/PVplugins/PVPythonViewConfigurator.py deleted file mode 100644 index 9d63e707..00000000 --- a/geos-posp/src/PVplugins/PVPythonViewConfigurator.py +++ /dev/null @@ -1,929 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. 
-# SPDX-FileContributor: Alexandre Benedicto, Martin Lemay -# ruff: noqa: E402 # disable Module level import not at top of file -import os -import sys -from typing import Any, Union, cast - -import pandas as pd # type: ignore[import-untyped] -from typing_extensions import Self - -dir_path = os.path.dirname(os.path.realpath(__file__)) -parent_dir_path = os.path.dirname(dir_path) -if parent_dir_path not in sys.path: - sys.path.append(parent_dir_path) - -from paraview.simple import ( # type: ignore[import-not-found] - GetActiveSource, - GetActiveView, - Render, - Show, - servermanager, -) -from paraview.util.vtkAlgorithm import ( # type: ignore[import-not-found] - VTKPythonAlgorithmBase, - smdomain, - smhint, - smproperty, - smproxy, -) -from vtkmodules.vtkCommonCore import ( - vtkDataArraySelection, - vtkInformation, - vtkInformationVector, -) - -import geos_posp.visu.PVUtils.paraviewTreatments as pvt -from geos_posp.visu.PVUtils.checkboxFunction import ( # type: ignore[attr-defined] - createModifiedCallback, -) -from geos_posp.visu.PVUtils.DisplayOrganizationParaview import ( - DisplayOrganizationParaview, -) -from geos_posp.visu.PVUtils.matplotlibOptions import ( - FontStyleEnum, - FontWeightEnum, - LegendLocationEnum, - LineStyleEnum, - MarkerStyleEnum, - OptionSelectionEnum, - optionEnumToXml, -) - -__doc__ = """ -PVPythonViewConfigurator is a Paraview plugin that allows to create cross-plots -from input data using the PythonView. - -Input type is vtkDataObject. - -This filter results in opening a new Python View window and displaying cross-plot. - -To use it: - -* Load the module in Paraview: Tools>Manage Plugins...>Load new>PVPythonViewConfigurator. -* Select the vtkDataObject containing the data to plot. -* Search and Apply PVPythonViewConfigurator Filter. 
- -""" - - -@smproxy.filter(name="PVPythonViewConfigurator", label="Python View Configurator") -@smhint.xml('') -@smproperty.input(name="Input") -@smdomain.datatype(dataTypes=["vtkDataObject"], composite_data_supported=True) -class PVPythonViewConfigurator(VTKPythonAlgorithmBase): - def __init__(self: Self) -> None: - """Paraview plugin to create cross-plots in a Python View. - - Input is a vtkDataObject. - """ - super().__init__(nInputPorts=1, nOutputPorts=1) - # python view layout and object - self.m_layoutName: str = "" - self.m_pythonView: Any - self.m_organizationDisplay = DisplayOrganizationParaview() - self.buildNewLayoutWithPythonView() - - # input source and curve names - inputSource = GetActiveSource() - dataset = servermanager.Fetch(inputSource) - dataframe: pd.DataFrame = pvt.vtkToDataframe(dataset) - self.m_pathPythonViewScript: str = os.path.join( - parent_dir_path, "visu/pythonViewUtils/mainPythonView.py" - ) - - # checkboxes - self.m_modifyInputs: int = 1 - self.m_modifyCurves: int = 1 - self.m_multiplyCurves: int = 0 - - # checkboxes curves available from the data of pipeline - self.m_validSources = vtkDataArraySelection() - self.m_curvesToPlot = vtkDataArraySelection() - self.m_curvesMinus1 = vtkDataArraySelection() - self.m_validSources.AddObserver("ModifiedEvent", createModifiedCallback(self)) # type: ignore[arg-type] - self.m_curvesToPlot.AddObserver("ModifiedEvent", createModifiedCallback(self)) # type: ignore[arg-type] - self.m_curvesMinus1.AddObserver("ModifiedEvent", createModifiedCallback(self)) # type: ignore[arg-type] - validSourceNames: set[str] = pvt.getPossibleSourceNames() - for sourceName in validSourceNames: - self.m_validSources.AddArray(sourceName) - validColumnsDataframe: list[str] = list(dataframe.columns) - for name in list(dataframe.columns): - for axis in ["X", "Y", "Z"]: - if "Points" + axis in name and "Points" + axis + "__" in name: - positionDoublon: int = validColumnsDataframe.index("Points" + axis) - 
validColumnsDataframe.pop(positionDoublon) - break - self.m_validColumnsDataframe: list[str] = sorted( - validColumnsDataframe, key=lambda x: x.lower() - ) - for curveName in validColumnsDataframe: - self.m_curvesToPlot.AddArray(curveName) - self.m_curvesMinus1.AddArray(curveName) - self.m_validSources.DisableAllArrays() - self.m_curvesToPlot.DisableAllArrays() - self.m_curvesMinus1.DisableAllArrays() - self.m_curveToUse: str = "" - # to change the aspects of curves - self.m_curvesToModify: set[str] = pvt.integrateSourceNames( - validSourceNames, set(validColumnsDataframe) - ) - self.m_color: tuple[float, float, float] = (0.0, 0.0, 0.0) - self.m_lineStyle: str = LineStyleEnum.SOLID.optionValue - self.m_lineWidth: float = 1.0 - self.m_markerStyle: str = MarkerStyleEnum.NONE.optionValue - self.m_markerSize: float = 1.0 - - # user choices - self.m_userChoices: dict[str, Any] = { - "variableName": "", - "curveNames": [], - "curveConvention": [], - "inputNames": [], - "plotRegions": False, - "reverseXY": False, - "logScaleX": False, - "logScaleY": False, - "minorticks": False, - "displayTitle": True, - "title": "title1", - "titleStyle": FontStyleEnum.NORMAL.optionValue, - "titleWeight": FontWeightEnum.BOLD.optionValue, - "titleSize": 12, - "legendDisplay": True, - "legendPosition": LegendLocationEnum.BEST.optionValue, - "legendSize": 10, - "removeJobName": True, - "removeRegions": False, - "curvesAspect": {}, - } - - def getUserChoices(self: Self) -> dict[str, Any]: - """Access the m_userChoices attribute. - - Returns: - dict[str] : the user choices for the figure. - """ - return self.m_userChoices - - def getInputNames(self: Self) -> set[str]: - """Get source names from user selection. - - Returns: - set[str] : source names from ParaView pipeline. 
- """ - inputAvailables = self.a01GetInputSources() - inputNames: set[str] = set(pvt.getArrayChoices(inputAvailables)) - return inputNames - - def defineInputNames(self: Self) -> None: - """Adds the input names to the userChoices.""" - inputNames: set[str] = self.getInputNames() - self.m_userChoices["inputNames"] = inputNames - - def defineUserChoicesCurves(self: Self) -> None: - """Define user choices for curves to plot.""" - sourceNames: set[str] = self.getInputNames() - dasPlot = self.b02GetCurvesToPlot() - dasMinus1 = self.b07GetCurveConvention() - curveNames: set[str] = set(pvt.getArrayChoices(dasPlot)) - minus1Names: set[str] = set(pvt.getArrayChoices(dasMinus1)) - toUse1: set[str] = pvt.integrateSourceNames(sourceNames, curveNames) - toUse2: set[str] = pvt.integrateSourceNames(sourceNames, minus1Names) - self.m_userChoices["curveNames"] = tuple(toUse1) - self.m_userChoices["curveConvention"] = tuple(toUse2) - - def defineCurvesAspect(self: Self) -> None: - """Define user choices for curve aspect properties.""" - curveAspect: tuple[tuple[float, float, float], str, float, str, float] = ( - self.getCurveAspect() - ) - curveName: str = self.getCurveToUse() - self.m_userChoices["curvesAspect"][curveName] = curveAspect - - def buildPythonViewScript(self: Self) -> str: - """Builds the Python script used to launch the Python View. - - The script is returned as a string to be then injected in the Python - View. - - Returns: - str: Complete Python View script. 
- """ - sourceNames: set[str] = self.getInputNames() - userChoices: dict[str, Any] = self.getUserChoices() - script: str = f"timestep = '{str(GetActiveView().ViewTime)}'\n" - script += f"sourceNames = {sourceNames}\n" - script += f"variableName = '{userChoices['variableName']}'\n" - script += f"dir_path = '{dir_path}'\n" - script += f"userChoices = {userChoices}\n\n\n" - with open(self.m_pathPythonViewScript) as file: - fileContents = file.read() - script += fileContents - return script - - def buildNewLayoutWithPythonView(self: Self) -> None: - """Create a new Python View layout.""" - # we first built the new layout - layout_names: list[str] = self.m_organizationDisplay.getLayoutsNames() - nb_layouts: int = len(layout_names) - # imagine two layouts already exists, the new one will be named "Layout #3" - layoutName: str = "Layout #" + str(nb_layouts + 1) - # check that we that the layoutName is new and does not belong to the list of layout_names, - # if not we modify the layoutName until it is a new one - if layoutName in layout_names: - cpt: int = 2 - while layoutName in layout_names: - layoutName = "Layout #" + str(nb_layouts + cpt) - cpt += 1 - self.m_organizationDisplay.addLayout(layoutName) - self.m_layoutName = layoutName - - # we then build the new python view - self.m_organizationDisplay.addViewToLayout("PythonView", layoutName, 0) - self.m_pythonView = self.m_organizationDisplay.getLayoutViews()[layoutName][0] - Show(GetActiveSource(), self.m_pythonView, "PythonRepresentation") - - # widgets definition - """The names of the @smproperty methods command names below have a letter in lower case in - front because PARAVIEW displays properties in the alphabetical order. - See https://gitlab.kitware.com/paraview/paraview/-/issues/21493 for possible improvements on - this issue""" - - @smproperty.dataarrayselection(name="InputSources") - def a01GetInputSources(self: Self) -> vtkDataArraySelection: - """Get all valid sources for the filter. 
- - Returns: - vtkDataArraySelection: valid data sources. - """ - return self.m_validSources - - @smproperty.xml( - """ - - """ - ) - def a02GroupFlow(self: Self) -> None: - """Organize groups.""" - self.Modified() - - @smproperty.stringvector(name="CurvesAvailable", information_only="1") - def b00GetCurvesAvailable(self: Self) -> list[str]: - """Get the available curves. - - Returns: - list[str]: list of curves. - """ - return self.m_validColumnsDataframe - - @smproperty.stringvector(name="Abscissa", number_of_elements="1") - @smdomain.xml( - """ - - """ - ) - def b01SetVariableName(self: Self, name: str) -> None: - """Set the name of X axis variable. - - Args: - name: name of the variable. - """ - self.m_userChoices["variableName"] = name - self.Modified() - - @smproperty.dataarrayselection(name="Ordinate") - def b02GetCurvesToPlot(self: Self) -> vtkDataArraySelection: - """Get the curves to plot. - - Returns: - vtkDataArraySelection: data to plot. - """ - return self.m_curvesToPlot - - @smproperty.intvector( - name="PlotsPerRegion", label="PlotsPerRegion", default_values=0 - ) - @smdomain.xml("""""") - def b03SetPlotsPerRegion(self: Self, boolean: bool) -> None: - """Set plot per region option. - - Args: - boolean: user choice. - """ - self.m_userChoices["plotRegions"] = boolean - self.Modified() - - @smproperty.xml( - """ - - - - """ - ) - def b04GroupFlow(self: Self) -> None: - """Organized groups.""" - self.Modified() - - @smproperty.intvector( - name="CurveConvention", - label="Select Curves To Change Convention", - default_values=0, - ) - @smdomain.xml("""""") - def b05SetCurveConvention(self: Self, boolean: bool) -> None: - """Select Curves To Change Convention. - - Args: - boolean: user choice. 
- """ - self.m_multiplyCurves = boolean - - @smproperty.xml( - """ - - """ - ) - def b06GroupFlow(self: Self) -> None: - """Organized groups.""" - self.Modified() - - @smproperty.dataarrayselection(name="CurveConventionSelection") - def b07GetCurveConvention(self: Self) -> vtkDataArraySelection: - """Get the curves to change convention. - - Returns: - vtkDataArraySelection: selected curves to change convention. - """ - return self.m_curvesMinus1 - - @smproperty.xml( - """ - - - """ - ) - def b08GroupFlow(self: Self) -> None: - """Organized groups.""" - self.Modified() - - @smproperty.intvector( - name="EditAxisProperties", label="Edit Axis Properties", default_values=0 - ) - @smdomain.xml("""""") - def c01SetEditAxisProperties(self: Self, boolean: bool) -> None: - """Set option to edit axis properties. - - Args: - boolean (bool): user choice. - """ - self.Modified() - - @smproperty.xml( - """ - - """ - ) - def c02GroupFlow(self: Self) -> None: - """Organized groups.""" - self.Modified() - - @smproperty.intvector(name="ReverseXY", label="Reverse XY Axes", default_values=0) - @smdomain.xml("""""") - def c02SetReverseXY(self: Self, boolean: bool) -> None: - """Set option to reverse X and Y axes. - - Args: - boolean (bool): user choice. - """ - self.m_userChoices["reverseXY"] = boolean - self.Modified() - - @smproperty.intvector(name="LogScaleX", label="X Axis Log Scale", default_values=0) - @smdomain.xml("""""") - def c03SetReverseXY(self: Self, boolean: bool) -> None: - """Set option to log scale for X axis. - - Args: - boolean (bool): user choice. - """ - self.m_userChoices["logScaleX"] = boolean - self.Modified() - - @smproperty.intvector(name="LogScaleY", label="Y Axis Log Scale", default_values=0) - @smdomain.xml("""""") - def c04SetReverseXY(self: Self, boolean: bool) -> None: - """Set option to log scale for Y axis. - - Args: - boolean (bool): user choice. 
- """ - self.m_userChoices["logScaleY"] = boolean - self.Modified() - - @smproperty.intvector( - name="Minorticks", label="Display Minor ticks", default_values=0 - ) - @smdomain.xml("""""") - def c05SetMinorticks(self: Self, boolean: bool) -> None: - """Set option to display minor ticks. - - Args: - boolean (bool): user choice. - """ - self.m_userChoices["minorticks"] = boolean - self.Modified() - - @smproperty.intvector( - name="CustomAxisLim", label="Use Custom Axis Limits", default_values=0 - ) - @smdomain.xml("""""") - def c06SetCustomAxisLim(self: Self, boolean: bool) -> None: - """Set option to define axis limits. - - Args: - boolean (bool): user choice. - """ - self.m_userChoices["customAxisLim"] = boolean - self.Modified() - - @smproperty.doublevector(name="LimMinX", label="X min", default_values=-1e36) - def c07LimMinX(self: Self, value: float) -> None: - """Set X axis min. - - Args: - value (float): X axis min. - """ - value2: Union[float, None] = value - if value2 == -1e36: - value2 = None - self.m_userChoices["limMinX"] = value2 - self.Modified() - - @smproperty.doublevector(name="LimMaxX", label="X max", default_values=1e36) - def c08LimMaxX(self: Self, value: float) -> None: - """Set X axis max. - - Args: - value (float): X axis max. - """ - value2: Union[float, None] = value - if value2 == 1e36: - value2 = None - self.m_userChoices["limMaxX"] = value2 - self.Modified() - - @smproperty.doublevector(name="LimMinY", label="Y min", default_values=-1e36) - def c09LimMinY(self: Self, value: float) -> None: - """Set Y axis min. - - Args: - value (float): Y axis min. - """ - value2: Union[float, None] = value - if value2 == -1e36: - value2 = None - self.m_userChoices["limMinY"] = value2 - self.Modified() - - @smproperty.doublevector(name="LimMaxY", label="Y max", default_values=1e36) - def c10LimMaxY(self: Self, value: float) -> None: - """Set Y axis max. - - Args: - value (float): Y axis max. 
- """ - value2: Union[float, None] = value - if value2 == 1e36: - value2 = None - self.m_userChoices["limMaxY"] = value2 - self.Modified() - - @smproperty.xml( - """ - - - - - - """ - ) - def c11GroupFlow(self: Self) -> None: - """Organized groups.""" - self.Modified() - - @smproperty.xml( - """ - - - - - - - """ - ) - def c12GroupFlow(self: Self) -> None: - """Organized groups.""" - self.Modified() - - @smproperty.intvector(name="DisplayTitle", label="Display Title", default_values=1) - @smdomain.xml("""""") - def d01SetDisplayTitle(self: Self, boolean: bool) -> None: - """Set option to display title. - - Args: - boolean (bool): user choice. - """ - self.m_userChoices["displayTitle"] = boolean - self.Modified() - - @smproperty.xml( - """ - - """ - ) - def d02GroupFlow(self: Self) -> None: - """Organized groups.""" - self.Modified() - - @smproperty.stringvector(name="Title", default_values="title1") - def d03SetTitlePlot(self: Self, title: str) -> None: - """Set title. - - Args: - title (str): title. - """ - self.m_userChoices["title"] = title - self.Modified() - - @smproperty.intvector(name="TitleStyle", label="Title Style", default_values=0) - @smdomain.xml(optionEnumToXml(cast(OptionSelectionEnum, FontStyleEnum))) - def d04SetTitleStyle(self: Self, value: int) -> None: - """Set title font style. - - Args: - value (int): title font style index in FontStyleEnum. - """ - choice = list(FontStyleEnum)[value] - self.m_userChoices["titleStyle"] = choice.optionValue - self.Modified() - - @smproperty.intvector(name="TitleWeight", label="Title Weight", default_values=1) - @smdomain.xml(optionEnumToXml(cast(OptionSelectionEnum, FontWeightEnum))) - def d05SetTitleWeight(self: Self, value: int) -> None: - """Set title font weight. - - Args: - value (int): title font weight index in FontWeightEnum. 
- """ - choice = list(FontWeightEnum)[value] - self.m_userChoices["titleWeight"] = choice.optionValue - self.Modified() - - @smproperty.intvector(name="TitleSize", label="Title Size", default_values=12) - @smdomain.xml("""""") - def d06SetTitleSize(self: Self, size: float) -> None: - """Set title font size. - - Args: - size (float): title font size between 1 and 50. - """ - self.m_userChoices["titleSize"] = size - self.Modified() - - @smproperty.xml( - """ - panel_visibility="advanced"> - - - - - - """ - ) - def d07PropertyGroup(self: Self) -> None: - """Organized groups.""" - self.Modified() - - @smproperty.intvector( - name="DisplayLegend", label="Display Legend", default_values=1 - ) - @smdomain.xml("""""") - def e00SetDisplayLegend(self: Self, boolean: bool) -> None: - """Set option to display legend. - - Args: - boolean (bool): user choice. - """ - self.m_userChoices["displayLegend"] = boolean - self.Modified() - - @smproperty.xml( - """ - - """ - ) - def e01PropertyGroup(self: Self) -> None: - """Organized groups.""" - self.Modified() - - @smproperty.intvector( - name="LegendPosition", label="Legend Position", default_values=0 - ) - @smdomain.xml(optionEnumToXml(cast(OptionSelectionEnum, LegendLocationEnum))) - def e02SetLegendPosition(self: Self, value: int) -> None: - """Set legend position. - - Args: - value (int): legend position index in LegendLocationEnum. - """ - choice = list(LegendLocationEnum)[value] - self.m_userChoices["legendPosition"] = choice.optionValue - self.Modified() - - @smproperty.intvector(name="LegendSize", label="Legend Size", default_values=10) - @smdomain.xml("""""") - def e03SetLegendSize(self: Self, size: float) -> None: - """Set legend font size. - - Args: - size (float): legend font size between 1 and 50. 
- """ - self.m_userChoices["legendSize"] = size - self.Modified() - - @smproperty.intvector( - name="RemoveJobName", label="Remove Job Name in legend", default_values=1 - ) - @smdomain.xml("""""") - def e04SetRemoveJobName(self: Self, boolean: bool) -> None: - """Set option to remove job names from legend. - - Args: - boolean (bool): user choice. - """ - self.m_userChoices["removeJobName"] = boolean - self.Modified() - - @smproperty.intvector( - name="RemoveRegionsName", - label="Remove Regions Name in legend", - default_values=0, - ) - @smdomain.xml("""""") - def e05SetRemoveRegionsName(self: Self, boolean: bool) -> None: - """Set option to remove region names from legend. - - Args: - boolean (bool): user choice. - """ - self.m_userChoices["removeRegions"] = boolean - self.Modified() - - @smproperty.xml( - """ - - - - - - """ - ) - def e06PropertyGroup(self: Self) -> None: - """Organized groups.""" - self.Modified() - - @smproperty.intvector( - name="ModifyCurvesAspect", label="Edit Curve Graphics", default_values=1 - ) - @smdomain.xml("""""") - def f01SetModifyCurvesAspect(self: Self, boolean: bool) -> None: - """Set option to change curve aspects. - - Args: - boolean (bool): user choice. - """ - self.m_modifyCurvesAspect = boolean - - @smproperty.xml( - """ - - """ - ) - def f02PropertyGroup(self: Self) -> None: - """Organized groups.""" - self.Modified() - - @smproperty.stringvector(name="CurvesInfo", information_only="1") - def f03GetCurveNames(self: Self) -> list[str]: - """Get curves to modify aspects. - - Returns: - set[str]: curves to modify aspects. - """ - return list(self.m_curvesToModify) - - # TODO: still usefull? - @smproperty.stringvector(name="CurveToModify", number_of_elements="1") - @smdomain.xml( - """ - - """ - ) - def f04SetCircleID(self: Self, value: str) -> None: - """Set m_curveToUse. 
- - Args: - value (float): value of m_curveToUse - """ - self.m_curveToUse = value - self.Modified() - - def getCurveToUse(self: Self) -> str: - """Get m_curveToUse.""" - return self.m_curveToUse - - @smproperty.intvector(name="LineStyle", label="Line Style", default_values=1) - @smdomain.xml(optionEnumToXml(cast(OptionSelectionEnum, LineStyleEnum))) - def f05SetLineStyle(self: Self, value: int) -> None: - """Set line style. - - Args: - value (int): line style index in LineStyleEnum - """ - choice = list(LineStyleEnum)[value] - self.m_lineStyle = choice.optionValue - self.Modified() - - @smproperty.doublevector(name="LineWidth", default_values=1.0) - @smdomain.xml("""""") - def f06SetLineWidth(self: Self, value: float) -> None: - """Set line width. - - Args: - value (float): line width between 1 and 10. - """ - self.m_lineWidth = value - self.Modified() - - @smproperty.intvector(name="MarkerStyle", label="Marker Style", default_values=0) - @smdomain.xml(optionEnumToXml(cast(LegendLocationEnum, MarkerStyleEnum))) - def f07SetMarkerStyle(self: Self, value: int) -> None: - """Set marker style. - - Args: - value (int): Marker style index in MarkerStyleEnum - """ - choice = list(MarkerStyleEnum)[value] - self.m_markerStyle = choice.optionValue - self.Modified() - - @smproperty.doublevector(name="MarkerSize", default_values=1.0) - @smdomain.xml("""""") - def f08SetMarkerSize(self: Self, value: float) -> None: - """Set marker size. - - Args: - value (float): size of markers between 1 and 30. - """ - self.m_markerSize = value - self.Modified() - - @smproperty.xml( - """ - - - - - - - - """ - ) - def f09PropertyGroup(self: Self) -> None: - """Organized groups.""" - self.Modified() - - @smproperty.doublevector( - name="ColorEnvelop", default_values=[0, 0, 0], number_of_elements=3 - ) - @smdomain.xml("""""") - def f10SetColor(self: Self, value0: float, value1: float, value2: float) -> None: - """Set envelope color. - - Args: - value0 (float): Red color between 0 and 1. 
- - value1 (float): Green color between 0 and 1. - - value2 (float): Blue color between 0 and 1. - """ - self.m_color = (value0, value1, value2) - self.Modified() - - @smproperty.xml( - """ - - - """ - ) - def f11PropertyGroup(self: Self) -> None: - """Organized groups.""" - self.Modified() - - def getCurveAspect( - self: Self, - ) -> tuple[tuple[float, float, float], str, float, str, float]: - """Get curve aspect properties according to user choices. - - Returns: - tuple: (color, linestyle, linewidth, marker, markersize) - """ - return ( - self.m_color, - self.m_lineStyle, - self.m_lineWidth, - self.m_markerStyle, - self.m_markerSize, - ) - - def FillInputPortInformation(self: Self, port: int, info: vtkInformation) -> int: - """Inherited from VTKPythonAlgorithmBase::RequestInformation. - - Args: - port (int): input port - info (vtkInformationVector): info - - Returns: - int: 1 if calculation successfully ended, 0 otherwise. - """ - if port == 0: - info.Set(self.INPUT_REQUIRED_DATA_TYPE(), "vtkDataObject") - else: - info.Set(self.INPUT_REQUIRED_DATA_TYPE(), "vtkDataObject") - return 1 - - def RequestDataObject( - self: Self, - request: vtkInformation, - inInfoVec: list[vtkInformationVector], - outInfoVec: vtkInformationVector, - ) -> int: - """Inherited from VTKPythonAlgorithmBase::RequestDataObject. - - Args: - request (vtkInformation): request - inInfoVec (list[vtkInformationVector]): input objects - outInfoVec (vtkInformationVector): output objects - - Returns: - int: 1 if calculation successfully ended, 0 otherwise. 
- """ - inData = self.GetInputData(inInfoVec, 0, 0) - outData = self.GetOutputData(outInfoVec, 0) - assert inData is not None - if outData is None or (not outData.IsA(inData.GetClassName())): - outData = inData.NewInstance() - outInfoVec.GetInformationObject(0).Set(outData.DATA_OBJECT(), outData) - return super().RequestDataObject(request, inInfoVec, outInfoVec) # type: ignore[no-any-return] - - def RequestData( - self: Self, - request: vtkInformation, # noqa: F841 - inInfoVec: list[vtkInformationVector], # noqa: F841 - outInfoVec: vtkInformationVector, # noqa: F841 - ) -> int: - """Inherited from VTKPythonAlgorithmBase::RequestData. - - Args: - request (vtkInformation): request - inInfoVec (list[vtkInformationVector]): input objects - outInfoVec (vtkInformationVector): output objects - - Returns: - int: 1 if calculation successfully ended, 0 otherwise. - """ - # pythonViewGeneration - assert self.m_pythonView is not None, "No Python View was found." - viewSize = GetActiveView().ViewSize - self.m_userChoices["ratio"] = viewSize[0] / viewSize[1] - self.defineInputNames() - self.defineUserChoicesCurves() - self.defineCurvesAspect() - self.m_pythonView.Script = self.buildPythonViewScript() - Render() - return 1 diff --git a/geos-posp/src/geos_posp/visu/PVUtils/checkboxFunction.py b/geos-posp/src/geos_posp/visu/PVUtils/checkboxFunction.py deleted file mode 100644 index 7fce5261..00000000 --- a/geos-posp/src/geos_posp/visu/PVUtils/checkboxFunction.py +++ /dev/null @@ -1,22 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. -# SPDX-FileContributor: Alexandre Benedicto -# ruff: noqa -# type: ignore -def createModifiedCallback(anobject): - """Helper for the creation and use of vtkDataArraySelection in ParaView. - - Args: - anobject: any object. 
- """ - import weakref - - weakref_obj = weakref.ref(anobject) - anobject = None - - def _markmodified(*args, **kwars): - o = weakref_obj() - if o is not None: - o.Modified() - - return _markmodified diff --git a/geos-posp/src/geos_posp/visu/PVUtils/paraviewTreatments.py b/geos-posp/src/geos_posp/visu/PVUtils/paraviewTreatments.py deleted file mode 100644 index 7de0995b..00000000 --- a/geos-posp/src/geos_posp/visu/PVUtils/paraviewTreatments.py +++ /dev/null @@ -1,611 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. -# SPDX-FileContributor: Alexandre Benedicto, Martin Lemay -# ruff: noqa: E402 # disable Module level import not at top of file -from enum import Enum -from typing import Any, Union - -import numpy as np -import numpy.typing as npt -import pandas as pd # type: ignore[import-untyped] -from paraview.modules.vtkPVVTKExtensionsMisc import ( # type: ignore[import-not-found] - vtkMergeBlocks, -) -from paraview.simple import ( # type: ignore[import-not-found] - FindSource, - GetActiveView, - GetAnimationScene, - GetDisplayProperties, - GetSources, - servermanager, -) -from vtkmodules.vtkCommonCore import ( - vtkDataArray, - vtkDataArraySelection, - vtkDoubleArray, - vtkPoints, -) -from vtkmodules.vtkCommonDataModel import ( - vtkCompositeDataSet, - vtkDataObject, - vtkMultiBlockDataSet, - vtkPolyData, - vtkTable, - vtkUnstructuredGrid, -) - -from geos_posp.processing.vtkUtils import ( - getArrayInObject, - isAttributeInObject, -) -from geos_posp.utils.GeosOutputsConstants import ( - ComponentNameEnum, - GeosMeshOutputsEnum, -) - -# valid sources for Python view configurator -# TODO: need to be consolidated -HARD_CODED_VALID_PVC_TYPE: set[str] = {"GeosLogReader", "RenameArrays"} - - -def vtkTableToDataframe(table: vtkTable) -> pd.DataFrame: - """From a vtkTable, creates and returns a pandas dataframe. - - Args: - table (vtkTable): vtkTable object. - - Returns: - pd.DataFrame: Pandas dataframe. 
- """ - data: list[dict[str, Any]] = [] - for rowIndex in range(table.GetNumberOfRows()): - rowData: dict[str, Any] = {} - for colIndex in range(table.GetNumberOfColumns()): - colName: str = table.GetColumnName(colIndex) - cellValue: Any = table.GetValue(rowIndex, colIndex) - # we have a vtkVariant value, we need a float - cellValueF: float = cellValue.ToFloat() - rowData[colName] = cellValueF - data.append(rowData) - df: pd.DataFrame = pd.DataFrame(data) - return df - - -def vtkPolyDataToPointsDataframe(polydata: vtkPolyData) -> pd.DataFrame: - """Creates a pandas dataframe containing points data from vtkPolyData. - - Args: - polydata (vtkPolyData): vtkPolyData object. - - Returns: - pd.DataFrame: Pandas dataframe containing the points data. - """ - points: vtkPoints = polydata.GetPoints() - assert points is not None, "Points is undefined." - nbrPoints: int = points.GetNumberOfPoints() - data: dict[str, Any] = { - "Point ID": np.empty(nbrPoints), - "PointsX": np.empty(nbrPoints), - "PointsY": np.empty(nbrPoints), - "PointsZ": np.empty(nbrPoints), - } - for pointID in range(nbrPoints): - point: tuple[float, float, float] = points.GetPoint(pointID) - data["Point ID"][pointID] = pointID - data["PointsX"][pointID] = point[0] - data["PointsY"][pointID] = point[1] - data["PointsZ"][pointID] = point[2] - pointData = polydata.GetPointData() - nbrArrays: int = pointData.GetNumberOfArrays() - for i in range(nbrArrays): - arrayToUse = pointData.GetArray(i) - arrayName: str = pointData.GetArrayName(i) - subArrayNames: list[str] = findSubArrayNames(arrayToUse, arrayName) - # Collect the data for each sub array - for ind, name in enumerate(subArrayNames): - data[name] = np.empty(nbrPoints) - for k in range(nbrPoints): - # Every element of the tuple correspond to one distinct - # sub array so we only need one value at a time - value: float = arrayToUse.GetTuple(k)[ind] - data[name][k] = value - df: pd.DataFrame = pd.DataFrame(data).set_index("Point ID") - return df - - -def 
vtkUnstructuredGridCellsToDataframe(grid: vtkUnstructuredGrid) -> pd.DataFrame: - """Creates a pandas dataframe containing points data from vtkUnstructuredGrid. - - Args: - grid (vtkUnstructuredGrid): vtkUnstructuredGrid object. - - Returns: - pd.DataFrame: Pandas dataframe. - """ - cellIdAttributeName = GeosMeshOutputsEnum.VTK_ORIGINAL_CELL_ID.attributeName - cellData = grid.GetCellData() - numberCells: int = grid.GetNumberOfCells() - data: dict[str, Any] = {} - for i in range(cellData.GetNumberOfArrays()): - arrayToUse = cellData.GetArray(i) - arrayName: str = cellData.GetArrayName(i) - subArrayNames: list[str] = findSubArrayNames(arrayToUse, arrayName) - # Collect the data for each sub array - for ind, name in enumerate(subArrayNames): - data[name] = np.empty(numberCells) - for k in range(numberCells): - # Every element of the tuple correspond to one distinct - # sub array so we only need one value at a time - value: float = arrayToUse.GetTuple(k)[ind] - data[name][k] = value - df: pd.DataFrame = pd.DataFrame(data).astype({cellIdAttributeName: int}) - - # set cell ids as index - - # df = df.astype({cellIdAttributeName: int}) - return df.set_index(cellIdAttributeName) - - -def vtkToDataframe(dataset: vtkDataObject) -> pd.DataFrame: - """Creates a dataframe containing points data from vtkTable or vtkPolyData. - - Args: - dataset (Any): dataset to convert if possible. - - Returns: - pd.DataFrame: if the dataset is in the right format. - """ - if isinstance(dataset, vtkTable): - return vtkTableToDataframe(dataset) - elif isinstance(dataset, vtkPolyData): - return vtkPolyDataToPointsDataframe(dataset) - elif isinstance(dataset, vtkUnstructuredGrid): - return vtkUnstructuredGridCellsToDataframe(dataset) - else: - raise AssertionError( - f"Invalid dataset format {type(dataset)}. 
" - + "Supported formats are: vtkTable, vtkpolyData and vtkUnstructuredGrid" - ) - - -def findSubArrayNames(vtkArray: vtkDataArray, arrayName: str) -> list[str]: - """Get sub array names from multi array attributes. - - Because arrays in ParaView can be of multiple dimensions, - it can be difficult to convert these arrays to numpy arrays. - Therefore, we can split the original array into multiple sub - one dimensional arrays. In that case, new sub names need to be - derived from the original array to be used. - - Args: - vtkArray (vtkDataArray): Array from vtk library. - arrayName (str): Name of the array. - - Returns: - list[str]: Sub array names from original array name. - """ - # The ordering of six elements can seem odd but is adapted to - # Geos output format of stress as : - # sigma11, sigma22, sigma33, sigma23, sigma13, sigma12 - sixComponents: tuple[str, str, str, str, str, str] = ComponentNameEnum.XYZ.value - nbrComponents: int = vtkArray.GetNumberOfComponents() - subArrayNames: list[str] = [] - if nbrComponents == 1: - subArrayNames.append(arrayName) - elif nbrComponents < 6: - for j in range(nbrComponents): - subArrayNames.append(arrayName + "_" + sixComponents[j]) - else: - for j in range(nbrComponents): - subArrayNames.append(arrayName + "_" + str(j)) - return subArrayNames - - -def getDataframesFromMultipleVTKSources( - sourceNames: set[str], commonColumn: str -) -> list[pd.DataFrame]: - """Creates the dataframe from each source if they have the commonColumn. - - Args: - sourceNames (set[str]): list of sources. - commonColumn (str): common column name. - - Returns: - list[pd.DataFrame]: output dataframe. - """ - # indexSource: int = commonColumn.rfind("__") - # commonColumnNoSource: str = commonColumn[:indexSource] - validDataframes: list[pd.DataFrame] = [] - for name in sourceNames: - source = FindSource(name) - assert source is not None, "Source is undefined." 
- dataset = servermanager.Fetch(source) - assert dataset is not None, "Dataset is undefined." - currentDF: pd.DataFrame = vtkToDataframe(dataset) - if commonColumn in currentDF.columns: - dfModified = currentDF.rename( - columns={ - col: col + "__" + name - for col in currentDF.columns - if col != commonColumn - } - ) - validDataframes.append(dfModified) - else: - print( - f"The source <<{name}>> could not be used" - + " to plot because the variable named <<" - + f"{commonColumn}>> could not be found." - ) - return validDataframes - - -def mergeDataframes(dataframes: list[pd.DataFrame], commonColumn: str) -> pd.DataFrame: - """Merge all dataframes into a single one by using the common column. - - Args: - dataframes (list[pd.DataFrame]): List of dataframes from - getDataframesFromMultipleVTKSources. - commonColumn (str): Name of the only common column between - all of the dataframes. - - Returns: - pd.DataFrame: Merged dataframes into a single one by 'outer' - on the commonColumn. - """ - assert len(dataframes) > 0 - if len(dataframes) == 1: - return dataframes[0] - else: - df0: pd.DataFrame = dataframes[0] - df1: pd.DataFrame = dataframes[1] - merged: pd.DataFrame = df0.merge(df1, on=commonColumn, how="outer") - if len(dataframes) > 2: - for df in dataframes[2:]: - merged = merged.merge(df, on=commonColumn, how="outer") - return merged - - -def addDataframeColumnsToVtkPolyData( - polyData: vtkPolyData, df: pd.DataFrame -) -> vtkPolyData: - """Add columns from a dataframe to a vtkPolyData. - - Args: - polyData (vtkPolyData): vtkPolyData before modifcation. - df (pd.DataFrame): Pandas dataframe. - - Returns: - vtkPolyData: vtkPolyData with new arrays. 
- """ - for column_name in df.columns: - column = df[column_name].values - array = vtkDoubleArray() - array.SetName(column_name) - array.SetNumberOfValues(polyData.GetNumberOfPoints()) - for i in range(polyData.GetNumberOfPoints()): - array.SetValue(i, column[i]) - polyData.GetPointData().AddArray(array) - - # Update vtkPolyData object - polyData.GetPointData().Modified() - polyData.Modified() - return polyData - - -# Functions to help the processing of PythonViewConfigurator - - -def getPossibleSourceNames() -> set[str]: - """Get the list of valid source names for PythonViewConfigurator. - - In PythonViewConfigurator, multiple sources can be considered as - valid inputs. We want the user to know the names of every of these - sources that can be used to plot data. This function therefore identifies - which source names are valid to be used later as sources. - - Returns: - set[str]: Source names in the paraview pipeline. - """ - # get all sources different from PythonViewConfigurator - validNames: set[str] = set() - for k in GetSources(): - sourceName: str = k[0] - source = FindSource(sourceName) - if (source is not None) and ("PythonViewConfigurator" not in source.__str__()): - dataset = servermanager.Fetch(source) - if dataset.IsA("vtkPolyData") or dataset.IsA("vtkTable"): - validNames.add(sourceName) - return validNames - - -def usefulSourceNamesPipeline() -> set[str]: - """Get the list of valid pipelines for PythonViewConfigurator. - - When using the PythonViewConfigurator, we want to check if the sources - in the ParaView pipeline are compatible with what the filter can take as - input. So this function scans every sources of the pipeline and if it - corresponds to one of the hardcoded valid types, we keep the name. 
- They are right now : ["GeosLogReader", "RenameArrays"] - - Returns: - set[str]: [sourceName1, ..., sourceNameN] - """ - usefulSourceNames: set[str] = set() - allSourceNames: set[str] = {n[0] for n, s in GetSources().items()} - for name in allSourceNames: - source = FindSource(name) - if type(source).__name__ in HARD_CODED_VALID_PVC_TYPE: - usefulSourceNames.add(name) - return usefulSourceNames - - -def getDatasFromSources(sourceNames: set[str]) -> dict[str, pd.DataFrame]: - """Get the data from input sources. - - Args: - sourceNames (set[str]): [sourceName1, ..., sourceNameN] - - Returns: - dict[[str, pd.DataFrame]]: dictionary where source names are keys and - dataframe are values. - { sourceName1: servermanager.Fetch(FindSource(sourceName1)), - ... - sourceNameN: servermanager.Fetch(FindSource(sourceNameN)) } - """ - usefulDatas: dict[str, Any] = {} - for name in sourceNames: - dataset = servermanager.Fetch(FindSource(name)) - usefulDatas[name] = dataset - return usefulDatas - - -def usefulVisibleDatasPipeline() -> dict[str, Any]: - """Get the list of visible pipelines. - - When using the PythonViewConfigurator, we want to collect the data of - each source that is visible in the paraview pipeline and that is - compatible as input data for the filter. Therefore, only certain types of - sources will be considered as valid. They are right now : - ["GeosLogReader", "RenameArrays"] - - Finally, if the sources are visible and valid, we access their data and - return the names of the source and their respective data. - - Returns: - dict[str, 'vtkInformation']: dictionary of source names and data from - pipeline. - { sourceName1: servermanager.Fetch(FindSource(sourceName1)), - ... 
- sourceNameN: servermanager.Fetch(FindSource(sourceNameN)) } - """ - usefulDatas: dict[str, Any] = {} - sourceNamesVisible: set[str] = set() - for n, s in GetSources().items(): - if servermanager.GetRepresentation(s, GetActiveView()) is not None: - displayProperties = GetDisplayProperties(s, view=GetActiveView()) - if (displayProperties is not None) and (displayProperties.Visibility == 1): - sourceNamesVisible.add(n[0]) - - for name in sourceNamesVisible: - source = FindSource(name) - if type(source).__name__ in HARD_CODED_VALID_PVC_TYPE: - usefulDatas[name] = servermanager.Fetch(FindSource(name)) - return usefulDatas - - -def isFilter(sourceName: str) -> bool: - """Identify if a source name can link to a filter in the ParaView pipeline. - - Args: - sourceName (str): name of a source object in the pipeline - - Returns: - bool: True if filter, False instead. - """ - source: Any = FindSource(sourceName) - if source is None: - print(f"sourceName <<{sourceName}>> does not exist in the pipeline") - return False - else: - try: - test: Any = source.GetClientSideObject().GetInputAlgorithm() # noqa: F841 - return True - except Exception: - return False - - -def getFilterInput(sourceName: str) -> vtkDataObject: - """Access the vtk dataset that is used as input for a filter. - - Args: - sourceName (str): name of a source object in the pipeline. - - Returns: - Any: The vtk dataset that serves as input for the filter. - """ - filtre = FindSource(sourceName) - assert filtre is not None, "Source is undefined." - clientSideObject = filtre.GetClientSideObject() - assert clientSideObject is not None, "Client Side Object is undefined." - inputAlgo = clientSideObject.GetInputAlgorithm() - assert inputAlgo is not None, "Input Algorithm is undefined." 
- inputValues = inputAlgo.GetInput() - if isinstance(inputValues, vtkDataObject): - return inputValues - return vtkDataObject() - - -def getArrayChoices(array: vtkDataArraySelection) -> list[str]: - """Extracts the column names of input array when they are enabled. - - Args: - array (vtkDataArraySelection): input data - - Returns: - set[str]: [columnName1, ..., columnNameN] - """ - checkedColumns: list[str] = [] - for i in range(array.GetNumberOfArrays()): - columnName: str = array.GetArrayName(i) - if array.ArrayIsEnabled(columnName): - checkedColumns.append(columnName) - return checkedColumns - - -def integrateSourceNames(sourceNames: set[str], arrayChoices: set[str]) -> set[str]: - """Aggregate source and arrayChoices names. - - When creating the user choices in PythonViewConfigurator, you need - to take into account both the source names and the choices of curves - to have user choices corresponding to the column names of the dataframe - with the data to be plot. - - Args: - sourceNames (set[str]): Name of sources found in ParaView pipeline. - arrayChoices (set[str]): Column names of the vtkdataarrayselection. - - Returns: - set[str]: [sourceName1__choice1, sourceName1__choice2, - ..., sourceNameN__choiceN] - """ - completeNames: set[str] = set() - for sourceName in sourceNames: - for choice in arrayChoices: - completeName: str = choice + "__" + sourceName - completeNames.add(completeName) - return completeNames - - -def getVtkOriginalCellIds( - mesh: Union[vtkMultiBlockDataSet, vtkCompositeDataSet, vtkDataObject] -) -> list[str]: - """Get vtkOriginalCellIds from a vtkUnstructuredGrid object. - - Args: - mesh (vtkMultiBlockDataSet|vtkCompositeDataSet|vtkDataObject): input mesh. - - Returns: - list[str]: ids of the cells. 
- """ - # merge blocks for vtkCompositeDataSet - mesh2: vtkUnstructuredGrid = mergeFilterPV(mesh, True) - name: str = GeosMeshOutputsEnum.VTK_ORIGINAL_CELL_ID.attributeName - assert isAttributeInObject( - mesh2, name, False - ), f"Attribute {name} is not in the mesh." - return [str(int(ide)) for ide in getArrayInObject(mesh2, name, False)] - - -def strEnumToEnumerationDomainXml(enumObj: Enum) -> str: - """Creates an enumeration domain from an Enum objec. - - Creates an enumeration domain from an Enum objec - for the dropdown widgets of paraview plugin. - - Args: - enumObj (Enum): Enumeration values to put in the dropdown widget. - - Returns: - str: the XML string. - """ - xml: str = """""" - for i, unitObj in enumerate(list(enumObj)): # type: ignore[call-overload] - xml += f"""""" - xml += """""" - return xml - - -def strListToEnumerationDomainXml(properties: Union[list[str], set[str]]) -> str: - """Creates an enumeration domain from a list of strings. - - Creates an enumeration domain from a list of strings - for the dropdown widgets of paraview plugin. - - Args: - properties (set[str] | list[str]): Properties to put in the dropdown widget. - - Returns: - str: the XML string. - """ - xml: str = """""" - for i, prop in enumerate(list(properties)): - xml += f"""""" - xml += """""" - return xml - - -def dataframeForEachTimestep(sourceName: str) -> dict[str, pd.DataFrame]: - """Get the data from source at each time step. - - In ParaView, a source object can contain data for multiple - timesteps. If so, knowing the source name, we can access its data - for each timestep and store it in a dict where the keys are the - timesteps and the values the data at each one of them. - - Args: - sourceName (str): Name of the source in ParaView pipeline. - - Returns: - dict[str, pd.DataFrame]: dictionary where time is the key and dataframe - is the value. - """ - animationScene = GetAnimationScene() - assert animationScene is not None, "animationScene is undefined." 
- # we set the animation to the initial timestep - animationScene.GoToFirst() - source = FindSource(sourceName) - dataset: vtkDataObject = servermanager.Fetch(source) - assert dataset is not None, f"Dataset is undefined." - dataset2: vtkUnstructuredGrid = mergeFilterPV(dataset) - time: str = str(animationScene.TimeKeeper.Time) - dfPerTimestep: dict[str, pd.DataFrame] = {time: vtkToDataframe(dataset2)} - # then we iterate on the other timesteps of the source - for _ in range(animationScene.NumberOfFrames): # type: ignore - animationScene.GoToNext() - source = FindSource(sourceName) - dataset = servermanager.Fetch(source) - dataset2 = mergeFilterPV(dataset) - time = str(animationScene.TimeKeeper.Time) - dfPerTimestep[time] = vtkToDataframe(dataset2) - return dfPerTimestep - - -def getTimeStepIndex(time: float, timeSteps: npt.NDArray[np.float64]) -> int: - """Get the time step index of input time from the list of time steps. - - Args: - time (float): time - timeSteps (npt.NDArray[np.float64]): Array of time steps - - Returns: - int: time step index - """ - indexes: npt.NDArray[np.int64] = np.where(np.isclose(timeSteps, time))[0] - assert ( - indexes.size > 0 - ), f"Current time {time} does not exist in the selected object." - return int(indexes[0]) - - -def mergeFilterPV( - input: vtkDataObject, - keepPartialAttributes: bool = False, -) -> vtkUnstructuredGrid: - """Apply Paraview merge block filter. - - Args: - input (vtkMultiBlockDataSet | vtkCompositeDataSet | vtkDataObject): composite - object to merge blocks - keepPartialAttributes (bool): if True, keep partial attributes after merge. - - Defaults to False. 
- - Returns: - vtkUnstructuredGrid: merged block object - - """ - mergeFilter: vtkMergeBlocks = vtkMergeBlocks() - mergeFilter.SetInputData(input) - mergeFilter.Update() - return mergeFilter.GetOutputDataObject(0) diff --git a/geos-posp/src/geos_posp/visu/pythonViewUtils/Figure2DGenerator.py b/geos-posp/src/geos_posp/visu/pythonViewUtils/Figure2DGenerator.py deleted file mode 100644 index b585922a..00000000 --- a/geos-posp/src/geos_posp/visu/pythonViewUtils/Figure2DGenerator.py +++ /dev/null @@ -1,146 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. -# SPDX-FileContributor: Alexandre Benedicto - -from typing import Any - -import pandas as pd # type: ignore[import-untyped] -from matplotlib import axes, figure, lines # type: ignore[import-untyped] -from matplotlib.font_manager import ( # type: ignore[import-untyped] - FontProperties, # type: ignore[import-untyped] -) -from typing_extensions import Self - -import geos_posp.visu.pythonViewUtils.functionsFigure2DGenerator as fcts -from geos_posp.utils.Logger import Logger, getLogger - - -class Figure2DGenerator: - def __init__( - self: Self, dataframe: pd.DataFrame, userChoices: dict[str, list[str]] - ) -> None: - """Utility to create cross plots using Python View. - - We want to plot f(X) = Y where in this class, - "X" will be called "variable", "Y" will be called "curves". - - Args: - dataframe (pd.DataFrame): data to plot - userChoices (dict[str, list[str]]): user choices. 
- """ - self.m_dataframe: pd.DataFrame = dataframe - self.m_userChoices: dict[str, Any] = userChoices - self.m_fig: figure.Figure - self.m_axes: list[axes._axes.Axes] = [] - self.m_lines: list[lines.Line2D] = [] - self.m_labels: list[str] = [] - self.m_logger: Logger = getLogger("Python View Configurator") - - try: - # apply minus 1 multiplication on certain columns - self.initMinus1Multiplication() - # defines m_fig, m_axes, m_lines and m_lables - self.plotInitialFigure() - # then to edit and customize the figure - self.enhanceFigure() - self.m_logger.info("Data were successfully plotted.") - - except Exception as e: - mess: str = "Plot creation failed due to:" - self.m_logger.critical(mess) - self.m_logger.critical(e, exc_info=True) - - def initMinus1Multiplication(self: Self) -> None: - """Multiply by -1 certain columns of the input dataframe.""" - df: pd.DataFrame = self.m_dataframe.copy(deep=True) - minus1CurveNames: list[str] = self.m_userChoices["curveConvention"] - for name in minus1CurveNames: - df[name] = df[name] * (-1) - self.m_dataframe = df - - def enhanceFigure(self: Self) -> None: - """Apply all the enhancement features to the initial figure.""" - self.changeTitle() - self.changeMinorticks() - self.changeAxisScale() - self.changeAxisLimits() - - def plotInitialFigure(self: Self) -> None: - """Generates a figure and axes objects from matplotlib. - - The figure plots all the curves along the X or Y axis, with legend and - label for X and Y. 
- """ - if self.m_userChoices["plotRegions"]: - if not self.m_userChoices["reverseXY"]: - (fig, ax_all, lines, labels) = fcts.multipleSubplots( - self.m_dataframe, self.m_userChoices - ) - else: - (fig, ax_all, lines, labels) = fcts.multipleSubplotsInverted( - self.m_dataframe, self.m_userChoices - ) - else: - if not self.m_userChoices["reverseXY"]: - (fig, ax_all, lines, labels) = fcts.oneSubplot( - self.m_dataframe, self.m_userChoices - ) - else: - (fig, ax_all, lines, labels) = fcts.oneSubplotInverted( - self.m_dataframe, self.m_userChoices - ) - self.m_fig = fig - self.m_axes = ax_all - self.m_lines = lines - self.m_labels = labels - - def changeTitle(self: Self) -> None: - """Update title of the first axis of the figure based on user choices.""" - if self.m_userChoices["displayTitle"]: - title: str = self.m_userChoices["title"] - fontTitle: FontProperties = fcts.buildFontTitle(self.m_userChoices) - self.m_fig.suptitle(title, fontproperties=fontTitle) - - def changeMinorticks(self: Self) -> None: - """Set the minorticks on or off for every axes.""" - choice: bool = self.m_userChoices["minorticks"] - if choice: - for ax in self.m_axes: - ax.minorticks_on() - else: - for ax in self.m_axes: - ax.minorticks_off() - - def changeAxisScale(self: Self) -> None: - """Set the minorticks on or off for every axes.""" - for ax in self.m_axes: - if self.m_userChoices["logScaleX"]: - ax.set_xscale("log") - if self.m_userChoices["logScaleY"]: - ax.set_yscale("log") - - def changeAxisLimits(self: Self) -> None: - """Update axis limits.""" - if self.m_userChoices["customAxisLim"]: - for ax in self.m_axes: - xmin, xmax = ax.get_xlim() - if self.m_userChoices["limMinX"] is not None: - xmin = self.m_userChoices["limMinX"] - if self.m_userChoices["limMaxX"] is not None: - xmax = self.m_userChoices["limMaxX"] - ax.set_xlim(xmin, xmax) - - ymin, ymax = ax.get_ylim() - if self.m_userChoices["limMinY"] is not None: - ymin = self.m_userChoices["limMinY"] - if 
self.m_userChoices["limMaxY"] is not None: - ymax = self.m_userChoices["limMaxY"] - ax.set_ylim(ymin, ymax) - - def getFigure(self: Self) -> figure.Figure: - """Acces the m_fig attribute. - - Returns: - figure.Figure: Figure containing all the plots. - """ - return self.m_fig diff --git a/geos-posp/src/geos_posp/visu/pythonViewUtils/functionsFigure2DGenerator.py b/geos-posp/src/geos_posp/visu/pythonViewUtils/functionsFigure2DGenerator.py deleted file mode 100644 index 6547adfe..00000000 --- a/geos-posp/src/geos_posp/visu/pythonViewUtils/functionsFigure2DGenerator.py +++ /dev/null @@ -1,1424 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. -# SPDX-FileContributor: Alexandre Benedicto -import math -from typing import Any - -import matplotlib.pyplot as plt # type: ignore[import-untyped] -import numpy as np -import numpy.typing as npt -import pandas as pd # type: ignore[import-untyped] -from matplotlib import axes, figure, lines # type: ignore[import-untyped] -from matplotlib.font_manager import ( # type: ignore[import-untyped] - FontProperties, # type: ignore[import-untyped] -) - -import geos_posp.processing.geosLogReaderFunctions as fcts - -""" -Plotting tools for 2D figure and axes generation. -""" - - -def oneSubplot( - df: pd.DataFrame, userChoices: dict[str, Any] -) -> tuple[figure.Figure, list[axes.Axes], list[lines.Line2D], list[str]]: - """Created a single subplot. - - From a dataframe, knowing which curves to plot along which variable, - generates a fig and its list of axes with the data plotted. - - Args: - df (pd.DataFrame): dataframe containing at least two columns, - one named "variableName" and the other "curveName" - userChoices (dict[str, Any]): Choices made by widget selection - in PythonViewConfigurator filter. - - Returns: - tuple[figure.Figure, list[axes.Axes], - list[lines.Line2D] , list[str]]: the fig and its list of axes. 
- """ - curveNames: list[str] = userChoices["curveNames"] - variableName: str = userChoices["variableName"] - curvesAspect: dict[ - str, tuple[tuple[float, float, float], str, float, str, float] - ] = userChoices["curvesAspect"] - associatedProperties: dict[str, list[str]] = associatePropertyToAxeType(curveNames) - fig, ax = plt.subplots(constrained_layout=True) - all_ax: list[axes.Axes] = setupAllAxes(ax, variableName, associatedProperties, True) - lineList: list[lines.Line2D] = [] - labels: list[str] = [] - cpt_cmap: int = 0 - x: npt.NDArray[np.float64] = df[variableName].to_numpy() - for cpt_ax, (ax_name, propertyNames) in enumerate(associatedProperties.items()): - ax_to_use: axes.Axes = setupAxeToUse(all_ax, cpt_ax, ax_name, False) - for propName in propertyNames: - y: npt.NDArray[np.float64] = df[propName].to_numpy() - plotAxe(ax_to_use, x, y, propName, cpt_cmap, curvesAspect) - cpt_cmap += 1 - new_lines, new_labels = ax_to_use.get_legend_handles_labels() - lineList += new_lines # type: ignore[arg-type] - labels += new_labels - labels, lineList = smartLabelsSorted(labels, lineList, userChoices) - if userChoices["displayLegend"]: - ax.legend( - lineList, - labels, - loc=userChoices["legendPosition"], - fontsize=userChoices["legendSize"], - ) - ax.grid() - return (fig, all_ax, lineList, labels) - - -def oneSubplotInverted( - df: pd.DataFrame, userChoices: dict[str, Any] -) -> tuple[figure.Figure, list[axes.Axes], list[lines.Line2D], list[str]]: - """Created a single subplot with inverted X Y axes. - - From a dataframe, knowing which curves to plot along which variable, - generates a fig and its list of axes with the data plotted. - - Args: - df (pd.DataFrame): dataframe containing at least two columns, - one named "variableName" and the other "curveName" - userChoices (dict[str, Any]): Choices made by widget selection - in PythonViewConfigurator filter. 
- - Returns: - tuple[figure.Figure, list[axes.Axes], - list[lines.Line2D] , list[str]]: the fig and its list of axes. - """ - curveNames: list[str] = userChoices["curveNames"] - variableName: str = userChoices["variableName"] - curvesAspect: dict[ - str, tuple[tuple[float, float, float], str, float, str, float] - ] = userChoices["curvesAspect"] - associatedProperties: dict[str, list[str]] = associatePropertyToAxeType(curveNames) - fig, ax = plt.subplots(constrained_layout=True) - all_ax: list[axes.Axes] = setupAllAxes( - ax, variableName, associatedProperties, False - ) - linesList: list[lines.Line2D] = [] - labels: list[str] = [] - cpt_cmap: int = 0 - y: npt.NDArray[np.float64] = df[variableName].to_numpy() - for cpt_ax, (ax_name, propertyNames) in enumerate(associatedProperties.items()): - ax_to_use: axes.Axes = setupAxeToUse(all_ax, cpt_ax, ax_name, True) - for propName in propertyNames: - x: npt.NDArray[np.float64] = df[propName].to_numpy() - plotAxe(ax_to_use, x, y, propName, cpt_cmap, curvesAspect) - cpt_cmap += 1 - new_lines, new_labels = ax_to_use.get_legend_handles_labels() - linesList += new_lines # type: ignore[arg-type] - labels += new_labels - labels, linesList = smartLabelsSorted(labels, linesList, userChoices) - if userChoices["displayLegend"]: - ax.legend( - linesList, - labels, - loc=userChoices["legendPosition"], - fontsize=userChoices["legendSize"], - ) - ax.grid() - return (fig, all_ax, linesList, labels) - - -def multipleSubplots( - df: pd.DataFrame, userChoices: dict[str, Any] -) -> tuple[figure.Figure, list[axes.Axes], list[lines.Line2D], list[str]]: - """Created multiple subplots. - - From a dataframe, knowing which curves to plot along which variable, - generates a fig and its list of axes with the data plotted. - - Args: - df (pd.DataFrame): dataframe containing at least two columns, - one named "variableName" and the other "curveName". - userChoices (dict[str, Any]): Choices made by widget selection - in PythonViewConfigurator filter. 
- - Returns: - tuple[figure.Figure, list[axes.Axes], - list[lines.Line2D] , list[str]]: the fig and its list of axes. - """ - curveNames: list[str] = userChoices["curveNames"] - variableName: str = userChoices["variableName"] - curvesAspect: dict[ - str, tuple[tuple[float, float, float], str, float, str, float] - ] = userChoices["curvesAspect"] - ratio: float = userChoices["ratio"] - assosIdentifiers: dict[str, dict[str, list[str]]] = associationIdentifiers( - curveNames - ) - nbr_suplots: int = len(assosIdentifiers.keys()) - # if only one subplots needs to be created - if nbr_suplots == 1: - return oneSubplot(df, userChoices) - - layout: tuple[int, int, int] = smartLayout(nbr_suplots, ratio) - fig, axs0 = plt.subplots(layout[0], layout[1], constrained_layout=True) - axs: list[axes.Axes] = axs0.flatten().tolist() # type: ignore[union-attr] - for i in range(layout[2]): - fig.delaxes(axs[-(i + 1)]) - all_lines: list[lines.Line2D] = [] - all_labels: list[str] = [] - # first loop for subplots - propertiesExtremas: dict[str, tuple[float, float]] = ( - findExtremasPropertiesForAssociatedIdentifiers(df, assosIdentifiers, True) - ) - for j, identifier in enumerate(assosIdentifiers.keys()): - first_ax: axes.Axes = axs[j] - associatedProperties: dict[str, list[str]] = assosIdentifiers[identifier] - all_ax: list[axes.Axes] = setupAllAxes( - first_ax, variableName, associatedProperties, True - ) - axs += all_ax[1:] - linesList: list[lines.Line2D] = [] - labels: list[str] = [] - cpt_cmap: int = 0 - x: npt.NDArray[np.float64] = df[variableName].to_numpy() - # second loop for axes per subplot - for cpt_ax, (ax_name, propertyNames) in enumerate(associatedProperties.items()): - ax_to_use: axes.Axes = setupAxeToUse(all_ax, cpt_ax, ax_name, False) - for propName in propertyNames: - y: npt.NDArray[np.float64] = df[propName].to_numpy() - plotAxe(ax_to_use, x, y, propName, cpt_cmap, curvesAspect) - ax_to_use.set_ylim(*propertiesExtremas[ax_name]) - cpt_cmap += 1 - new_lines, new_labels 
= ax_to_use.get_legend_handles_labels() - linesList += new_lines # type: ignore[arg-type] - all_lines += new_lines # type: ignore[arg-type] - labels += new_labels - all_labels += new_labels - labels, linesList = smartLabelsSorted(labels, linesList, userChoices) - if userChoices["displayLegend"]: - first_ax.legend( - linesList, - labels, - loc=userChoices["legendPosition"], - fontsize=userChoices["legendSize"], - ) - if userChoices["displayTitle"]: - first_ax.set_title(identifier, fontsize=10) - first_ax.grid() - return (fig, axs, all_lines, all_labels) - - -def multipleSubplotsInverted( - df: pd.DataFrame, userChoices: dict[str, Any] -) -> tuple[figure.Figure, list[axes.Axes], list[lines.Line2D], list[str]]: - """Created multiple subplots with inverted X Y axes. - - From a dataframe, knowing which curves to plot along which variable, - generates a fig and its list of axes with the data plotted. - - Args: - df (pd.DataFrame): dataframe containing at least two columns, - one named "variableName" and the other "curveName". - userChoices (dict[str, Any]): Choices made by widget selection - in PythonViewConfigurator filter. - - Returns: - tuple[figure.Figure, list[axes.Axes], - list[lines.Line2D] , list[str]]: the fig and its list of axes. 
- """ - curveNames: list[str] = userChoices["curveNames"] - variableName: str = userChoices["variableName"] - curvesAspect: dict[ - str, tuple[tuple[float, float, float], str, float, str, float] - ] = userChoices["curvesAspect"] - ratio: float = userChoices["ratio"] - assosIdentifiers: dict[str, dict[str, list[str]]] = associationIdentifiers( - curveNames - ) - nbr_suplots: int = len(assosIdentifiers.keys()) - # if only one subplots needs to be created - if nbr_suplots == 1: - return oneSubplotInverted(df, userChoices) - - layout: tuple[int, int, int] = smartLayout(nbr_suplots, ratio) - fig, axs0 = plt.subplots(layout[0], layout[1], constrained_layout=True) - axs: list[axes.Axes] = axs0.flatten().tolist() # type: ignore[union-attr] - for i in range(layout[2]): - fig.delaxes(axs[-(i + 1)]) - all_lines: list[lines.Line2D] = [] - all_labels: list[str] = [] - # first loop for subplots - propertiesExtremas: dict[str, tuple[float, float]] = ( - findExtremasPropertiesForAssociatedIdentifiers(df, assosIdentifiers, True) - ) - for j, identifier in enumerate(assosIdentifiers.keys()): - first_ax: axes.Axes = axs[j] - associatedProperties: dict[str, list[str]] = assosIdentifiers[identifier] - all_ax: list[axes.Axes] = setupAllAxes( - first_ax, variableName, associatedProperties, False - ) - axs += all_ax[1:] - linesList: list[lines.Line2D] = [] - labels: list[str] = [] - cpt_cmap: int = 0 - y: npt.NDArray[np.float64] = df[variableName].to_numpy() - # second loop for axes per subplot - for cpt_ax, (ax_name, propertyNames) in enumerate(associatedProperties.items()): - ax_to_use: axes.Axes = setupAxeToUse(all_ax, cpt_ax, ax_name, True) - for propName in propertyNames: - x: npt.NDArray[np.float64] = df[propName].to_numpy() - plotAxe(ax_to_use, x, y, propName, cpt_cmap, curvesAspect) - ax_to_use.set_xlim(propertiesExtremas[ax_name]) - cpt_cmap += 1 - new_lines, new_labels = ax_to_use.get_legend_handles_labels() - linesList += new_lines # type: ignore[arg-type] - all_lines += 
new_lines # type: ignore[arg-type] - labels += new_labels - all_labels += new_labels - labels, linesList = smartLabelsSorted(labels, linesList, userChoices) - if userChoices["displayLegend"]: - first_ax.legend( - linesList, - labels, - loc=userChoices["legendPosition"], - fontsize=userChoices["legendSize"], - ) - if userChoices["displayTitle"]: - first_ax.set_title(identifier, fontsize=10) - first_ax.grid() - return (fig, axs, all_lines, all_labels) - - -def setupAllAxes( - first_ax: axes.Axes, - variableName: str, - associatedProperties: dict[str, list[str]], - axisX: bool, -) -> list[axes.Axes]: - """Modify axis name and ticks avec X or Y axis of all subplots. - - Args: - first_ax (axes.Axes): subplot id. - variableName (str): name of the axis. - associatedProperties (dict[str, list[str]]): Name of the properties - axisX (bool): X (True) or Y (False) axis to modify. - - Returns: - list[axes.Axes]: modified subplots - """ - all_ax: list[axes.Axes] = [first_ax] - if axisX: - first_ax.set_xlabel(variableName) - first_ax.ticklabel_format( - style="sci", axis="x", scilimits=(0, 0), useMathText=True - ) - for i in range(1, len(associatedProperties.keys())): - second_ax = first_ax.twinx() - assert isinstance(second_ax, axes.Axes) - all_ax.append(second_ax) - all_ax[i].spines["right"].set_position(("axes", 1 + 0.07 * (i - 1))) - all_ax[i].tick_params(axis="y", which="both", left=False, right=True) - all_ax[i].yaxis.set_ticks_position("right") - all_ax[i].yaxis.offsetText.set_position((1.04 + 0.07 * (i - 1), 0)) - first_ax.yaxis.offsetText.set_position((-0.04, 0)) - else: - first_ax.set_ylabel(variableName) - first_ax.ticklabel_format( - style="sci", axis="y", scilimits=(0, 0), useMathText=True - ) - for i in range(1, len(associatedProperties.keys())): - second_ax = first_ax.twiny() - assert isinstance(second_ax, axes.Axes) - all_ax.append(second_ax) - all_ax[i].spines["bottom"].set_position(("axes", -0.08 * i)) - all_ax[i].xaxis.set_label_position("bottom") - 
all_ax[i].tick_params(axis="x", which="both", bottom=True, top=False) - all_ax[i].xaxis.set_ticks_position("bottom") - return all_ax - - -def setupAxeToUse( - all_ax: list[axes.Axes], axeId: int, ax_name: str, axisX: bool -) -> axes.Axes: - """Modify axis name and ticks avec X or Y axis of subplot axeId in all_ax. - - Args: - all_ax (list[axes.Axes]): list of all subplots - axeId (int): id of the subplot - ax_name (str): name of the X or Y axis - axisX (bool): X (True) or Y (False) axis to modify. - - Returns: - axes.Axes: modified subplot - """ - ax_to_use: axes.Axes = all_ax[axeId] - if axisX: - ax_to_use.set_xlabel(ax_name) - ax_to_use.ticklabel_format( - style="sci", axis="x", scilimits=(0, 0), useMathText=True - ) - else: - ax_to_use.set_ylabel(ax_name) - ax_to_use.ticklabel_format( - style="sci", axis="y", scilimits=(0, 0), useMathText=True - ) - return ax_to_use - - -def plotAxe( - ax_to_use: axes.Axes, - x: npt.NDArray[np.float64], - y: npt.NDArray[np.float64], - propertyName: str, - cpt_cmap: int, - curvesAspect: dict[str, tuple[tuple[float, float, float], str, float, str, float]], -) -> None: - """Plot x, y data using input ax_to_use according to curvesAspect. 
- - Args: - ax_to_use (axes.Axes): subplot to use - x (npt.NDArray[np.float64]): abscissa data - y (npt.NDArray[np.float64]): ordinate data - propertyName (str): name of the property - cpt_cmap (int): colormap to use - curvesAspect (dict[str, tuple[tuple[float, float, float],str, float, str, float]]): - user choices on curve aspect - """ - cmap = plt.rcParams["axes.prop_cycle"].by_key()["color"][cpt_cmap % 10] - mask = np.logical_and(np.isnan(x), np.isnan(y)) - not_mask = ~mask - # Plot only when x and y values are not nan values - if propertyName in curvesAspect: - asp: tuple[tuple[float, float, float], str, float, str, float] = curvesAspect[ - propertyName - ] - ax_to_use.plot( - x[not_mask], - y[not_mask], - label=propertyName, - color=asp[0], - linestyle=asp[1], - linewidth=asp[2], - marker=asp[3], - markersize=asp[4], - ) - else: - ax_to_use.plot(x[not_mask], y[not_mask], label=propertyName, color=cmap) - - -def getExtremaAllAxes( - axes: list[axes.Axes], -) -> tuple[tuple[float, float], tuple[float, float]]: - """Gets the limits of both X and Y axis as a 2x2 element tuple. - - Args: - axes (list[axes.Axes]): list of subplots to get limits. - - Returns: - tuple[tuple[float, float], tuple[float, float]]:: ((xMin, xMax), (yMin, yMax)) - """ - assert len(axes) > 0 - xMin, xMax, yMin, yMax = getAxeLimits(axes[0]) - if len(axes) > 1: - for i in range(1, len(axes)): - x1, x2, y1, y2 = getAxeLimits(axes[i]) - if x1 < xMin: - xMin = x1 - if x2 > xMax: - xMax = x2 - if y1 < yMin: - yMin = y1 - if y2 > yMax: - yMax = y2 - return ((xMin, xMax), (yMin, yMax)) - - -def getAxeLimits(ax: axes.Axes) -> tuple[float, float, float, float]: - """Gets the limits of both X and Y axis as a 4 element tuple. - - Args: - ax (axes.Axes): subplot to get limits. 
- - Returns: - tuple[float, float, float, float]: (xMin, xMax, yMin, yMax) - """ - xMin, xMax = ax.get_xlim() - yMin, yMax = ax.get_ylim() - return (xMin, xMax, yMin, yMax) - - -def findExtremasPropertiesForAssociatedIdentifiers( - df: pd.DataFrame, - associatedIdentifiers: dict[str, dict[str, list[str]]], - offsetPlotting: bool = False, - offsetPercentage: int = 5, -) -> dict[str, tuple[float, float]]: - """Find min and max of all properties linked to a same identifier. - - Using an associatedIdentifiers dict containing associatedProperties dict, - we can find the extremas for each property of each identifier. Once we have them all, - we compare for each identifier what are the most extreme values and only the biggest and - lowest are kept in the end. - - - Args: - df (pd.DataFrame): Pandas dataframe - associatedIdentifiers (dict[str, dict[str, list[str]]]): property identifiers. - offsetPlotting (bool, optional): When using the values being returned, - we might want to add an offset to these values. If set to True, - the offsetPercentage is taken into account. Defaults to False. - offsetPercentage (int, optional): Value by which we will offset - the min and max values of each tuple of floats. Defaults to 5. 
- - Returns: - dict[str, tuple[float, float]]: { - "BHP (Pa)": (minAllWells, maxAllWells), - "TotalMassRate (kg)": (minAllWells, maxAllWells), - "TotalSurfaceVolumetricRate (m3/s)": (minAllWells, maxAllWells), - "SurfaceVolumetricRateCO2 (m3/s)": (minAllWells, maxAllWells), - "SurfaceVolumetricRateWater (m3/s)": (minAllWells, maxAllWells) - } - """ - extremasProperties: dict[str, tuple[float, float]] = {} - # first we need to find the extrema for each property type per region - propertyTypesExtremas: dict[str, list[tuple[float, float]]] = {} - for associatedProperties in associatedIdentifiers.values(): - extremasPerProperty: dict[str, tuple[float, float]] = ( - findExtremasAssociatedProperties(df, associatedProperties) - ) - for propertyType, extremaFound in extremasPerProperty.items(): - if propertyType not in propertyTypesExtremas: - propertyTypesExtremas[propertyType] = [extremaFound] - else: - propertyTypesExtremas[propertyType].append(extremaFound) - # then, once all extrema have been found for all regions, we need to figure out - # which extrema per property type is the most extreme one - for propertyType in propertyTypesExtremas: - values: list[tuple[float, float]] = propertyTypesExtremas[propertyType] - minValues: list[float] = [values[i][0] for i in range(len(values))] - maxValues: list[float] = [values[i][1] for i in range(len(values))] - lowest, highest = (min(minValues), max(maxValues)) - if offsetPlotting: - offset: float = (highest - lowest) / 100 * offsetPercentage - lowest, highest = (lowest - offset, highest + offset) - extremasProperties[propertyType] = (lowest, highest) - return extremasProperties - - -def findExtremasAssociatedProperties( - df: pd.DataFrame, associatedProperties: dict[str, list[str]] -) -> dict[str, tuple[float, float]]: - """Find the min and max of properties. 
- - Using an associatedProperties dict containing property types - as keys and a list of property names as values, - and a pandas dataframe whose column names are composed of those same - property names, you can find the min and max values of each property - type and return it as a tuple. - - Args: - df (pd.DataFrame): Pandas dataframe - associatedProperties (dict[str, list[str]]): { - "Pressure (Pa)": ["Reservoir__Pressure__Pa__Source1"], - "Mass (kg)": ["CO2__Mass__kg__Source1", - "Water__Mass__kg__Source1"] - } - - Returns: - dict[str, tuple[float, float]]: { - "Pressure (Pa)": (minPressure, maxPressure), - "Mass (kg)": (minMass, maxMass) - } - """ - extremasProperties: dict[str, tuple[float, float]] = {} - for propertyType, propertyNames in associatedProperties.items(): - minValues = np.empty(len(propertyNames)) - maxValues = np.empty(len(propertyNames)) - for i, propertyName in enumerate(propertyNames): - values: npt.NDArray[np.float64] = df[propertyName].to_numpy() - minValues[i] = np.nanmin(values) - maxValues[i] = np.nanmax(values) - extrema: tuple[float, float] = ( - float(np.min(minValues)), - float(np.max(maxValues)), - ) - extremasProperties[propertyType] = extrema - return extremasProperties - - -""" -Utils for treatment of the data -""" - - -def associatePropertyToAxeType(propertyNames: list[str]) -> dict[str, list[str]]: - """Identify property types. - - From a list of property names, identify if each of this property - corresponds to a certain property type like "Pressure", "Mass", - "Temperature" etc ... and returns a dict where the keys are the property - type and the value the list of property names associated to it. 
- - Args: - propertyNames (list[str]): ["Reservoir__Pressure__Pa__Source1", - "CO2__Mass__kg__Source1", "Water__Mass__kg__Source1"] - - Returns: - dict[str, list[str]]: { "Pressure (Pa)": ["Reservoir__Pressure__Pa__Source1"], - "Mass (kg)": ["CO2__Mass__kg__Source1", - "Water__Mass__kg__Source1"] } - """ - propertyIds: list[str] = fcts.identifyProperties(propertyNames) - associationTable: dict[str, str] = { - "0": "Pressure", - "1": "Pressure", - "2": "Temperature", - "3": "PoreVolume", - "4": "PoreVolume", - "5": "Mass", - "6": "Mass", - "7": "Mass", - "8": "Mass", - "9": "Mass", - "10": "Mass", - "11": "BHP", - "12": "MassRate", - "13": "VolumetricRate", - "14": "VolumetricRate", - "15": "BHP", - "16": "MassRate", - "17": "VolumetricRate", - "18": "VolumetricRate", - "19": "VolumetricRate", - "20": "Volume", - "21": "VolumetricRate", - "22": "Volume", - "23": "Iterations", - "24": "Iterations", - "25": "Stress", - "26": "Displacement", - "27": "Permeability", - "28": "Porosity", - "29": "Ratio", - "30": "Fraction", - "31": "BulkModulus", - "32": "ShearModulus", - "33": "OedometricModulus", - "34": "Points", - "35": "Density", - "36": "Mass", - "37": "Mass", - "38": "Time", - "39": "Time", - } - associatedPropertyToAxeType: dict[str, list[str]] = {} - noUnitProperties: list[str] = [ - "Iterations", - "Porosity", - "Ratio", - "Fraction", - "OedometricModulus", - ] - for i, propId in enumerate(propertyIds): - idProp: str = propId.split(":")[0] - propNoId: str = propId.split(":")[1] - associatedType: str = associationTable[idProp] - if associatedType in noUnitProperties: - axeName: str = associatedType - else: - propIdElts: list[str] = propNoId.split("__") - # no unit was found - if len(propIdElts) <= 2: - axeName = associatedType - # there is a unit - else: - unit: str = propIdElts[-2] - axeName = associatedType + " (" + unit + ")" - if axeName not in associatedPropertyToAxeType: - associatedPropertyToAxeType[axeName] = [] - 
associatedPropertyToAxeType[axeName].append(propertyNames[i]) - return associatedPropertyToAxeType - - -def propertiesPerIdentifier(propertyNames: list[str]) -> dict[str, list[str]]: - """Extract identifiers with associatied properties. - - From a list of property names, extracts the identifier (name of the - region for flow property or name of a well for well property) and creates - a dictionnary with identifiers as keys and the properties containing them - for value in a list. - - Args: - propertyNames (list[str]): property names - Example - - .. code-block:: python - - [ - "WellControls1__BHP__Pa__Source1", - "WellControls1__TotalMassRate__kg/s__Source1", - "WellControls2__BHP__Pa__Source1", - "WellControls2__TotalMassRate__kg/s__Source1" - ] - - Returns: - dict[str, list[str]]: property identifiers - Example - - .. code-block:: python - - { - "WellControls1": [ - "WellControls1__BHP__Pa__Source1", - "WellControls1__TotalMassRate__kg/s__Source1" - ], - "WellControls2": [ - "WellControls2__BHP__Pa__Source1", - "WellControls2__TotalMassRate__kg/s__Source1" - ] - } - """ - propsPerIdentfier: dict[str, list[str]] = {} - for propertyName in propertyNames: - elements: list[str] = propertyName.split("__") - identifier: str = elements[0] - if identifier not in propsPerIdentfier: - propsPerIdentfier[identifier] = [] - propsPerIdentfier[identifier].append(propertyName) - return propsPerIdentfier - - -def associationIdentifiers(propertyNames: list[str]) -> dict[str, dict[str, list[str]]]: - """Extract identifiers with associatied curves. - - From a list of property names, extracts the identifier (name of the - region for flow property or name of a well for well property) and creates - a dictionnary with identifiers as keys and the properties containing them - for value in a list. - - Args: - propertyNames (list[str]): property names - Example - - .. 
code-block:: python - - [ - "WellControls1__BHP__Pa__Source1", - "WellControls1__TotalMassRate__kg/s__Source1", - "WellControls1__TotalSurfaceVolumetricRate__m3/s__Source1", - "WellControls1__SurfaceVolumetricRateCO2__m3/s__Source1", - "WellControls1__SurfaceVolumetricRateWater__m3/s__Source1", - "WellControls2__BHP__Pa__Source1", - "WellControls2__TotalMassRate__kg/s__Source1", - "WellControls2__TotalSurfaceVolumetricRate__m3/s__Source1", - "WellControls2__SurfaceVolumetricRateCO2__m3/s__Source1", - "WellControls2__SurfaceVolumetricRateWater__m3/s__Source1", - "WellControls3__BHP__Pa__Source1", - "WellControls3__TotalMassRate__tons/day__Source1", - "WellControls3__TotalSurfaceVolumetricRate__bbl/day__Source1", - "WellControls3__SurfaceVolumetricRateCO2__bbl/day__Source1", - "WellControls3__SurfaceVolumetricRateWater__bbl/day__Source1", - "Mean__BHP__Pa__Source1", - "Mean__TotalMassRate__tons/day__Source1", - "Mean__TotalSurfaceVolumetricRate__bbl/day__Source1", - "Mean__SurfaceVolumetricRateCO2__bbl/day__Source1", - "Mean__SurfaceVolumetricRateWater__bbl/day__Source1" - ] - - Returns: - dict[str, dict[str, list[str]]]: property identifiers - Example - - .. 
code-block:: python - - { - "WellControls1": { - 'BHP (Pa)': [ - 'WellControls1__BHP__Pa__Source1' - ], - 'MassRate (kg/s)': [ - 'WellControls1__TotalMassRate__kg/s__Source1' - ], - 'VolumetricRate (m3/s)': [ - 'WellControls1__TotalSurfaceVolumetricRate__m3/s__Source1', - 'WellControls1__SurfaceVolumetricRateCO2__m3/s__Source1', - 'WellControls1__SurfaceVolumetricRateWater__m3/s__Source1' - ] - }, - "WellControls2": { - 'BHP (Pa)': [ - 'WellControls2__BHP__Pa__Source1' - ], - 'MassRate (kg/s)': [ - 'WellControls2__TotalMassRate__kg/s__Source1' - ], - 'VolumetricRate (m3/s)': [ - 'WellControls2__TotalSurfaceVolumetricRate__m3/s__Source1', - 'WellControls2__SurfaceVolumetricRateCO2__m3/s__Source1', - 'WellControls2__SurfaceVolumetricRateWater__m3/s__Source1' - ] - }, - "WellControls3": { - 'BHP (Pa)': [ - 'WellControls3__BHP__Pa__Source1' - ], - 'MassRate (tons/day)': [ - 'WellControls3__TotalMassRate__tons/day__Source1' - ], - 'VolumetricRate (bbl/day)': [ - 'WellControls3__TotalSurfaceVolumetricRate__bbl/day__Source1', - 'WellControls3__SurfaceVolumetricRateCO2__bbl/day__Source1', - 'WellControls3__SurfaceVolumetricRateWater__bbl/day__Source1' - ] - }, - "Mean": { - 'BHP (Pa)': [ - 'Mean__BHP__Pa__Source1' - ], - 'MassRate (tons/day)': [ - 'Mean__TotalMassRate__tons/day__Source1' - ], - 'VolumetricRate (bbl/day)': [ - 'Mean__TotalSurfaceVolumetricRate__bbl/day__Source1', - 'Mean__SurfaceVolumetricRateCO2__bbl/day__Source1', - 'Mean__SurfaceVolumetricRateWater__bbl/day__Source1' - ] - } - } - """ - propsPerIdentfier: dict[str, list[str]] = propertiesPerIdentifier(propertyNames) - assosIdentifier: dict[str, dict[str, list[str]]] = {} - for ident, propNames in propsPerIdentfier.items(): - assosPropsToAxeType: dict[str, list[str]] = associatePropertyToAxeType( - propNames - ) - assosIdentifier[ident] = assosPropsToAxeType - return assosIdentifier - - -def buildFontTitle(userChoices: dict[str, Any]) -> FontProperties: - """Builds a Fontproperties object according to 
user choices on title. - - Args: - userChoices (dict[str, Any]): customization parameters. - - Returns: - FontProperties: FontProperties object for the title. - """ - fontTitle: FontProperties = FontProperties() - if "titleStyle" in userChoices: - fontTitle.set_style(userChoices["titleStyle"]) - if "titleWeight" in userChoices: - fontTitle.set_weight(userChoices["titleWeight"]) - if "titleSize" in userChoices: - fontTitle.set_size(userChoices["titleSize"]) - return fontTitle - - -def buildFontVariable(userChoices: dict[str, Any]) -> FontProperties: - """Builds a Fontproperties object according to user choices on variables. - - Args: - userChoices (dict[str, Any]): customization parameters. - - Returns: - FontProperties: FontProperties object for the variable axes. - """ - fontVariable: FontProperties = FontProperties() - if "variableStyle" in userChoices: - fontVariable.set_style(userChoices["variableStyle"]) - if "variableWeight" in userChoices: - fontVariable.set_weight(userChoices["variableWeight"]) - if "variableSize" in userChoices: - fontVariable.set_size(userChoices["variableSize"]) - return fontVariable - - -def buildFontCurves(userChoices: dict[str, Any]) -> FontProperties: - """Builds a Fontproperties object according to user choices on curves. - - Args: - userChoices (dict[str, str]): customization parameters. - - Returns: - FontProperties: FontProperties object for the curves axes. - """ - fontCurves: FontProperties = FontProperties() - if "curvesStyle" in userChoices: - fontCurves.set_style(userChoices["curvesStyle"]) - if "curvesWeight" in userChoices: - fontCurves.set_weight(userChoices["curvesWeight"]) - if "curvesSize" in userChoices: - fontCurves.set_size(userChoices["curvesSize"]) - return fontCurves - - -def customizeLines( - userChoices: dict[str, Any], labels: list[str], linesList: list[lines.Line2D] -) -> list[lines.Line2D]: - """Customize lines according to user choices. 
- - By applying the user choices, we modify or not the list of lines - and return it with the same number of lines in the same order. - - Args: - userChoices (dict[str, Any]): customization parameters. - labels (list[str]): labels of lines. - linesList (list[lines.Line2D]): list of lines object. - - Returns: - list[lines.Line2D]: list of lines object modified. - """ - if "linesModified" in userChoices: - linesModifs: dict[str, dict[str, Any]] = userChoices["linesModified"] - linesChanged: list[lines.Line2D] = [] - for i, label in enumerate(labels): - if label in linesModifs: - lineChanged: lines.Line2D = applyCustomizationOnLine( - linesList[i], linesModifs[label] - ) - linesChanged.append(lineChanged) - else: - linesChanged.append(linesList[i]) - return linesChanged - else: - return linesList - - -def applyCustomizationOnLine( - line: lines.Line2D, parameters: dict[str, Any] -) -> lines.Line2D: - """Apply modification methods on a line from parameters. - - Args: - line (lines.Line2D): Matplotlib Line2D - parameters (dict[str, Any]): dictionary of { - "linestyle": one of ["-","--","-.",":"] - "linewidth": positive int - "color": color code - "marker": one of ["",".","o","^","s","*","D","+","x"] - "markersize":positive int - } - - Returns: - lines.Line2D: Line2D object modified. - """ - if "linestyle" in parameters: - line.set_linestyle(parameters["linestyle"]) - if "linewidth" in parameters: - line.set_linewidth(parameters["linewidth"]) - if "color" in parameters: - line.set_color(parameters["color"]) - if "marker" in parameters: - line.set_marker(parameters["marker"]) - if "markersize" in parameters: - line.set_markersize(parameters["markersize"]) - return line - - -""" -Layout tools for layering subplots in a figure -""" - - -def isprime(x: int) -> bool: - """Checks if a number is primer or not. - - Args: - x (int): Positive number to test. - - Returns: - bool: True if prime, False if not. 
- """ - if x < 0: - print("Invalid number entry, needs to be positive int") - return False - - return all(x % n != 0 for n in range(2, int(x**0.5) + 1)) - - -def findClosestPairIntegers(x: int) -> tuple[int, int]: - """Get the pair of integers that multiply the closest to input value. - - Finds the closest pair of integers that when multiplied together, - gives a number the closest to the input number (always above or equal). - - Args: - x (int): Positive number. - - Returns: - tuple[int, int]: (highest int, lowest int) - """ - if x < 4: - return (x, 1) - while isprime(x): - x += 1 - N: int = round(math.sqrt(x)) - while x > N: - if x % N == 0: - M = x // N - highest = max(M, N) - lowest = min(M, N) - return (highest, lowest) - else: - N += 1 - return (x, 1) - - -def smartLayout(x: int, ratio: float) -> tuple[int, int, int]: - """Return the best layout according to the number of subplots. - - For multiple subplots, we need to have a layout that can adapt to - the number of subplots automatically. This function figures out the - best layout possible knowing the number of suplots and the figure ratio. - - Args: - x (int): Positive number. - ratio (float): width to height ratio of a figure. 
- - Returns: - tuple[int]: (nbr_rows, nbr_columns, number of axes to remove) - """ - pair: tuple[int, int] = findClosestPairIntegers(x) - nbrAxesToRemove: int = pair[0] * pair[1] - x - if ratio < 1: - return (pair[0], pair[1], nbrAxesToRemove) - else: - return (pair[1], pair[0], nbrAxesToRemove) - - -""" -Legend tools -""" - -commonAssociations: dict[str, str] = { - "pressuremin": "Pmin", - "pressureMax": "Pmax", - "pressureaverage": "Pavg", - "deltapressuremin": "DPmin", - "deltapressuremax": "DPmax", - "temperaturemin": "Tmin", - "temperaturemax": "Tmax", - "temperatureaverage": "Tavg", - "effectivestressxx": "ESxx", - "effectivestresszz": "ESzz", - "effectivestressratio": "ESratio", - "totaldisplacementx": "TDx", - "totaldisplacementy": "TDy", - "totaldisplacementz": "TDz", - "totalstressXX": "TSxx", - "totalstressZZ": "TSzz", - "stressxx": "Sxx", - "stressyy": "Syy", - "stresszz": "Szz", - "stressxy": "Sxy", - "stressxz": "Sxz", - "stressyz": "Syz", - "poissonratio": "PR", - "porosity": "PORO", - "specificgravity": "SG", - "theoreticalverticalstress": "TVS", - "density": "DNST", - "pressure": "P", - "permeabilityx": "PERMX", - "permeabilityy": "PERMY", - "permeabilityz": "PERMZ", - "oedometric": "OEDO", - "young": "YOUNG", - "shear": "SHEAR", - "bulk": "BULK", - "totaldynamicporevolume": "TDPORV", - "time": "TIME", - "dt": "DT", - "meanbhp": "MBHP", - "meantotalmassrate": "MTMR", - "meantotalvolumetricrate": "MTSVR", - "bhp": "BHP", - "totalmassrate": "TMR", - "cumulatedlineariter": "CLI", - "cumulatednewtoniter": "CNI", - "lineariter": "LI", - "newtoniter": "NI", -} - -phasesAssociations: dict[str, str] = { - "dissolvedmass": " IN ", - "immobile": "IMOB ", - "mobile": "MOB ", - "nontrapped": "NTRP ", - "dynamicporevolume": "DPORV ", - "meansurfacevolumetricrate": "MSVR ", - "surfacevolumetricrate": "SVR ", -} - - -def smartLabelsSorted( - labels: list[str], lines: list[lines.Line2D], userChoices: dict[str, Any] -) -> tuple[list[str], list[lines.Line2D]]: - 
"""Shorten all legend labels and sort them. - - To improve readability of the legend for an axe in ParaView, we can apply the - smartLegendLabel functionnality to reduce the size of each label. Plus we sort them - alphabetically and therefore, we also sort the lines the same way. - - Args: - labels (list[str]): Labels to use ax.legend() like - ["Region1__TemperatureAvg__K__job_123456", "Region1__PressureMin__Pa__job_123456"] - lines (list[lines.Line2D]): Lines plotted on axes of matplotlib figure like [line1, line2] - userChoices (dict[str, Any]): Choices made by widget selection - in PythonViewConfigurator filter. - - Returns: - tuple[list[str], list[lines.Line2D]]: Improved labels and sorted labels / lines like - (["Region1 Pmin", "Region1 Tavg"], [line2, line1]) - """ - smartLabels: list[str] = [smartLabel(label, userChoices) for label in labels] - # I need the labels to be ordered alphabetically for better readability of the legend - # Therefore, if I sort smartLabels, I need to also sort lines with the same order. - # But this can only be done if there are no duplicates of labels in smartLabels. - # If a duplicate is found, "sorted" will try to sort with line which has no comparison built in - # which will throw an error. - if len(set(smartLabels)) == len(smartLabels): - sortedBothLists = sorted(zip(smartLabels, lines)) - sortedLabels, sortedLines = zip(*sortedBothLists) - return (list(sortedLabels), list(sortedLines)) - else: - return (smartLabels, lines) - - -def smartLabel(label: str, userChoices: dict[str, Any]) -> str: - """Shorten label according to user choices. - - Labels name can tend to be too long. Therefore, we need to reduce the size of the label. - Depending on the choices made by the user, the identifier and the job name can disappear. - - Args: - label (str): A label to be plotted. - Example- Reservoir__DissolvedMassphaseName0InphaseName1__kg__job123456.out - userChoices (dict[str, Any]): user choices. 
- - Returns: - str: "phaseName0 in phaseName1" or "Reservoir phaseName0 in phaseName1" - or "phaseName0 in phaseName1 job123456.out" or - "Reservoir phaseName0 in phaseName1 job123456.out" - """ - # first step is to abbreviate the label to reduce its size - smartLabel: str = abbreviateLabel(label) - # When only one source is used as input, there is no need to precise which one is used - # in the label so the job name is useless. Same when removeJobName option is selected by user. - inputNames: list[str] = userChoices["inputNames"] - removeJobName: bool = userChoices["removeJobName"] - if len(inputNames) > 1 and not removeJobName: - jobName: str = findJobName(label) - smartLabel += " " + jobName - # When the user chooses to split the plot into subplots to plot by region or well, - # this identifier name will appear as a title of the subplot so no need to use it. - # Same applies when user decides to remove regions. - plotRegions: bool = userChoices["plotRegions"] - removeRegions: bool = userChoices["removeRegions"] - if not plotRegions and not removeRegions: - smartLabel = findIdentifier(label) + " " + smartLabel - return smartLabel - - -def abbreviateLabel(label: str) -> str: - """Get the abbreviation of the label according to reservoir nomenclature. - - When using labels to plot, the name can tend to be too long. Therefore, to respect - the logic of reservoir engineering vocabulary, abbreviations for common property names - can be used to shorten the name. The goal is therefore to generate the right abbreviation - for the label input. - - Args: - label (str): A label to be plotted. 
- Example- Reservoir__DissolvedMassphaseName0InphaseName1__kg__job123456.out - - Returns: - str: "phaseName0 in phaseName1" - """ - for commonAsso in commonAssociations: - if commonAsso in label.lower(): - return commonAssociations[commonAsso] - for phaseAsso in phasesAssociations: - if phaseAsso in label.lower(): - phases: list[str] = findPhasesLabel(label) - phase0: str = "" if len(phases) < 1 else phases[0] - phase1: str = "" if len(phases) < 2 else phases[1] - if phaseAsso == "dissolvedmass": - return phase0 + phasesAssociations[phaseAsso] + phase1 - else: - return phasesAssociations[phaseAsso] + phase0 - return label - - -def findIdentifier(label: str) -> str: - """Find identifier inside the label. - - When looking at a label, it may contain or not an identifier at the beginning of it. - An identifier is either a regionName or a wellName. - The goal is to find it and extract it if present. - - Args: - label (str): A label to be plotted. - Example- Reservoir__DissolvedMassphaseName0InphaseName1__kg__job123456.out - - Returns: - str: "Reservoir" - """ - identifier: str = "" - if "__" not in label: - print("Invalid label, cannot search identifier when no '__' in label.") - return identifier - subParts: list[str] = label.split("__") - if len(subParts) == 4: - identifier = subParts[0] - return identifier - - -def findJobName(label: str) -> str: - """Find the Geos job name at the end of the label. - - When looking at a label, it may contain or not a job name at the end of it. - The goal is to find it and extract it if present. - - Args: - label (str): A label to be plotted. 
- Example- Reservoir__DissolvedMassphaseName0InphaseName1__kg__job123456.out - - Returns: - str: "job123456.out" - """ - jobName: str = "" - if "__" not in label: - print("Invalid label, cannot search jobName when no '__' in label.") - return jobName - subParts: list[str] = label.split("__") - if len(subParts) == 4: - jobName = subParts[3] - return jobName - - -def findPhasesLabel(label: str) -> list[str]: - """Find phase name inside label. - - When looking at a label, it may contain or not patterns that indicates - the presence of a phase name within it. Therefore, if one of these patterns - is present, one or multiple phase names can be found and be extracted. - - Args: - label (str): A label to be plotted. - Example- Reservoir__DissolvedMassphaseName0InphaseName1__kg__job123456.out - - Returns: - list[str]: [phaseName0, phaseName1] - """ - phases: list[str] = [] - lowLabel: str = label.lower() - indexStart: int = 0 - indexEnd: int = 0 - if "__" not in label: - print("Invalid label, cannot search phases when no '__' in label.") - return phases - if "dissolvedmass" in lowLabel: - indexStart = lowLabel.index("dissolvedmass") + len("dissolvedmass") - indexEnd = lowLabel.rfind("__") - phasesSubstring: str = lowLabel[indexStart:indexEnd] - phases = phasesSubstring.split("in") - phases = [phase.capitalize() for phase in phases] - else: - if "dynamicporevolume" in lowLabel: - indexStart = lowLabel.index("__") + 2 - indexEnd = lowLabel.index("dynamicporevolume") - else: - for pattern in ["nontrapped", "trapped", "immobile", "mobile", "rate"]: - if pattern in lowLabel: - indexStart = lowLabel.index(pattern) + len(pattern) - indexEnd = lowLabel.rfind("mass") - if indexEnd < 0: - indexEnd = indexStart + lowLabel[indexStart:].find("__") - break - if indexStart < indexEnd: - phases = [lowLabel[indexStart:indexEnd].capitalize()] - return phases - - -""" -Under this is the first version of smartLabels without abbreviations. 
-""" - -# def smartLegendLabelsAndLines( -# labelNames: list[str], lines: list[Any], userChoices: dict[str, Any], regionName="" -# ) -> tuple[list[str], list[Any]]: -# """To improve readability of the legend for an axe in ParaView, we can apply the -# smartLegendLabel functionnality to reduce the size of each label. Plus we sort them -# alphabetically and therefore, we also sort the lines the same way. - -# Args: -# labelNames (list[str]): Labels to use ax.legend() like -# ["Region1__PressureMin__Pa__job_123456", "Region1__Temperature__K__job_123456"] -# lines (list[Any]): Lines plotted on axes of matplotlib figure like [line1, line2] -# userChoices (dict[str, Any]): Choices made by widget selection -# in PythonViewConfigurator filter. -# regionName (str, optional): name of the region. Defaults to "". - -# Returns: -# tuple[list[str], list[Any]]: Improved labels and sorted labels / lines like -# (["Temperature K", "PressureMin Pa"], [line2, line1]) -# """ -# smartLabels: list[str] = [ -# smartLegendLabel(labelName, userChoices, regionName) for labelName in labelNames -# ] -# # I need the labels to be ordered alphabetically for better readability of the legend -# # Therefore, if I sort smartLabels, I need to also sort lines with the same order -# sortedBothLists = sorted(zip(smartLabels, lines) -# sortedLabels, sortedLines = zip(*sortedBothLists) -# return (sortedLabels, sortedLines) - - -# def smartLegendLabel(labelName: str, userChoices: dict[str, Any], regionName="") -> str: -# """When plotting legend label, the label format can be improved by removing some -# overwhelming / repetitive prefixe / suffixe and have a shorter label. - -# Args: -# labelName (str): Label to use ax.legend() like -# Region1__PressureMin__Pa__job_123456 -# userChoices (dict[str, Any]): Choices made by widget selection -# in PythonViewConfigurator filter. -# regionName (str, optional): name of the region. Defaults to "". - -# Returns: -# str: Improved label name like PressureMin Pa. 
-# """ -# smartLabel: str = "" -# # When only one source is used as input, there is no need to precise which one -# # is used in the label. Same when removeJobName option is selected by user. -# inputNames: list[str] = userChoices["inputNames"] -# removeJobName: bool = userChoices["removeJobName"] -# if len(inputNames) <= 1 or removeJobName: -# smartLabel = removeJobNameInLegendLabel(labelName, inputNames) -# # When the user chooses to split the plot into subplots to plot by region, -# # the region name will appear as a title of the subplot so no need to use it. -# # Same applies when user decides to remove regions. -# plotRegions: bool = userChoices["plotRegions"] -# removeRegions: bool = userChoices["removeRegions"] -# if plotRegions or removeRegions: -# smartLabel = removeIdentifierInLegendLabel(smartLabel, regionName) -# smartLabel = smartLabel.replace("__", " ") -# return smartLabel - - -# def removeJobNameInLegendLabel(legendLabel: str, inputNames: list[str]) -> str: -# """When plotting legends, the name of the job is by default at the end of -# the label. Therefore, it can increase tremendously the size of the legend -# and we can avoid that by removing the job name from it. - -# Args: -# legendLabel (str): Label to use ax.legend() like -# Region1__PressureMin__Pa__job_123456 -# inputNames (list[str]): names of the sources use to plot. - -# Returns: -# str: Label without the job name like Region1__PressureMin__Pa. -# """ -# for inputName in inputNames: -# pattern: str = "__" + inputName -# if legendLabel.endswith(pattern): -# jobIndex: int = legendLabel.index(pattern) -# return legendLabel[:jobIndex] -# return legendLabel - - -# def removeIdentifierInLegendLabel(legendLabel: str, regionName="") -> str: -# """When plotting legends, the name of the region is by default at the -# beginning of the label. Here we remove the region name from the legend label. 
- -# Args: -# legendLabel (str): Label to use ax.legend() like -# Region1__PressureMin__Pa__job_123456 -# regionName (str): name of the region. Defaults to "". - -# Returns: -# str: Label without the job name like PressureMin__Pa__job_123456 -# """ -# if "__" not in legendLabel: -# return legendLabel -# if regionName == "": -# firstRegionIndex: int = legendLabel.index("__") -# return legendLabel[firstRegionIndex + 2:] -# pattern: str = regionName + "__" -# if legendLabel.startswith(pattern): -# return legendLabel[len(pattern):] -# return legendLabel - - -""" -Other 2D tools for simplest figures -""" - - -def basicFigure( - df: pd.DataFrame, variableName: str, curveName: str -) -> tuple[figure.Figure, axes.Axes]: - """Creates a plot. - - Generates a figure and axes objects from matplotlib that plots - one curve along the X axis, with legend and label for X and Y. - - Args: - df (pd.DataFrame): dataframe containing at least two columns, - one named "variableName" and the other "curveName" - variableName (str): Name of the variable column - curveName (str): Name of the column to display along that variable. - - Returns: - tuple[figure.Figure, axes.Axes]: the fig and the ax. - """ - fig, ax = plt.subplots() - x: npt.NDArray[np.float64] = df[variableName].to_numpy() - y: npt.NDArray[np.float64] = df[curveName].to_numpy() - ax.plot(x, y, label=curveName) - ax.set_xlabel(variableName) - ax.set_ylabel(curveName) - ax.legend(loc="best") - return (fig, ax) - - -def invertedBasicFigure( - df: pd.DataFrame, variableName: str, curveName: str -) -> tuple[figure.Figure, axes.Axes]: - """Creates a plot with inverted XY axis. - - Generates a figure and axes objects from matplotlib that plots - one curve along the Y axis, with legend and label for X and Y. 
- - Args: - df (pd.DataFrame): dataframe containing at least two columns, - one named "variableName" and the other "curveName" - variableName (str): Name of the variable column - curveName (str): Name of the column to display along that variable. - - Returns: - tuple[figure.Figure, axes.Axes]: the fig and the ax. - """ - fig, ax = plt.subplots() - x: npt.NDArray[np.float64] = df[curveName].to_numpy() - y: npt.NDArray[np.float64] = df[variableName].to_numpy() - ax.plot(x, y, label=variableName) - ax.set_xlabel(curveName) - ax.set_ylabel(variableName) - ax.legend(loc="best") - return (fig, ax) - - -def adjust_subplots(fig: figure.Figure, invertXY: bool) -> figure.Figure: - """Adjust the size of the subplot in the fig. - - Args: - fig (figure.Figure): Matplotlib figure - invertXY (bool): Choice to either intervert or not the X and Y axes - - Returns: - figure.Figure: Matplotlib figure with adjustements - """ - if invertXY: - fig.subplots_adjust(left=0.05, right=0.98, top=0.9, bottom=0.2) - else: - fig.subplots_adjust(left=0.06, right=0.94, top=0.95, bottom=0.08) - return fig diff --git a/geos-posp/src/geos_posp/visu/pythonViewUtils/mainPythonView.py b/geos-posp/src/geos_posp/visu/pythonViewUtils/mainPythonView.py deleted file mode 100644 index e211c72a..00000000 --- a/geos-posp/src/geos_posp/visu/pythonViewUtils/mainPythonView.py +++ /dev/null @@ -1,40 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. -# SPDX-FileContributor: Alexandre Benedicto -# type: ignore -try: - import matplotlib.pyplot as plt - from paraview import python_view - - import geos_posp.visu.PVUtils.paraviewTreatments as pvt - from geos_posp.visu.pythonViewUtils.Figure2DGenerator import ( - Figure2DGenerator, - ) - - plt.close() - if len(sourceNames) == 0: # noqa: F821 - raise ValueError( - "No source name was found. 
Please check at least" - + " one source in <>" - ) - - dataframes = pvt.getDataframesFromMultipleVTKSources( - sourceNames, variableName # noqa: F821 - ) - dataframe = pvt.mergeDataframes(dataframes, variableName) # noqa: F821 - obj_figure = Figure2DGenerator(dataframe, userChoices) # noqa: F821 - fig = obj_figure.getFigure() - - def setup_data(view) -> None: # noqa - pass - - def render(view, width: int, height: int): # noqa - fig.set_size_inches(float(width) / 100.0, float(height) / 100.0) - imageToReturn = python_view.figure_to_image(fig) - return imageToReturn - -except Exception as e: - from geos_posp.utils.Logger import getLogger - - logger = getLogger("Python View Configurator") - logger.critical(e, exc_info=True) diff --git a/geos-posp/src/geos_posp/visu/PVUtils/__init__.py b/geos-pv/src/geos_pv/geosLogReaderUtils/__init__.py similarity index 100% rename from geos-posp/src/geos_posp/visu/PVUtils/__init__.py rename to geos-pv/src/geos_pv/geosLogReaderUtils/__init__.py diff --git a/geos-posp/src/geos_posp/visu/pythonViewUtils/__init__.py b/geos-pv/src/geos_pv/pyplotUtils/__init__.py similarity index 100% rename from geos-posp/src/geos_posp/visu/pythonViewUtils/__init__.py rename to geos-pv/src/geos_pv/pyplotUtils/__init__.py diff --git a/geos-posp/src/geos_posp/visu/PVUtils/matplotlibOptions.py b/geos-pv/src/geos_pv/pyplotUtils/matplotlibOptions.py similarity index 100% rename from geos-posp/src/geos_posp/visu/PVUtils/matplotlibOptions.py rename to geos-pv/src/geos_pv/pyplotUtils/matplotlibOptions.py diff --git a/geos-posp/src/geos_posp/visu/PVUtils/DisplayOrganizationParaview.py b/geos-pv/src/geos_pv/utils/DisplayOrganizationParaview.py similarity index 100% rename from geos-posp/src/geos_posp/visu/PVUtils/DisplayOrganizationParaview.py rename to geos-pv/src/geos_pv/utils/DisplayOrganizationParaview.py From c556362c85744617e0ef474029ce5fc35052ddfd Mon Sep 17 00:00:00 2001 From: mlemayTTE Date: Wed, 2 Apr 2025 17:53:48 +0200 Subject: [PATCH 03/20] set local 
dependency path management --- geos-posp/setup.py | 25 +- geos-pv/pyproject.toml | 11 +- geos-pv/requirements.txt | 5 + geos-pv/setup.py | 19 +- geos-pv/src/PVplugins/PVGeosLogReader.py | 309 ++++---- geos-pv/src/PVplugins/__init__.py | 15 + .../GeosLogReaderAquifers.py | 154 ++-- .../GeosLogReaderConvergence.py | 120 ++-- .../geosLogReaderUtils/GeosLogReaderFlow.py | 137 ++-- .../geosLogReaderUtils/GeosLogReaderWells.py | 218 +++--- .../geosLogReaderFunctions.py | 661 ++++++++---------- .../geos_pv/pyplotUtils/matplotlibOptions.py | 81 +-- .../utils/DisplayOrganizationParaview.py | 130 ++-- geos-pv/src/geos_pv/utils/checkboxFunction.py | 6 +- .../src/geos_pv/utils/paraviewTreatments.py | 353 +++++----- 15 files changed, 1032 insertions(+), 1212 deletions(-) create mode 100644 geos-pv/requirements.txt create mode 100644 geos-pv/src/PVplugins/__init__.py diff --git a/geos-posp/setup.py b/geos-posp/setup.py index d933e5f6..bc0de3fa 100644 --- a/geos-posp/setup.py +++ b/geos-posp/setup.py @@ -2,15 +2,18 @@ from setuptools import setup # This is where you add any fancy path resolution to the local lib: -package_name = "geos-utils" -geos_utils_path: str = (Path(__file__).parent.parent / package_name).as_uri() +install_requires_external = [ + "vtk >= 9.3", + "numpy >= 1.26", + "pandas >= 2.2", + "typing_extensions >= 4.12", +] +local_package_names = [ "geos-utils", "geos-geomechanics" ] -setup( - install_requires=[ - "vtk >= 9.3", - "numpy >= 1.26", - "pandas >= 2.2", - "typing_extensions >= 4.12", - f"{package_name} @ {geos_utils_path}", - ] -) \ No newline at end of file +geos_python_packages_path: Path = Path( __file__ ).parent.parent +install_requires_local = [ + f"{name} @ {(geos_python_packages_path / name).as_uri()}" for name in local_package_names + if ( geos_python_packages_path / name ).exists() +] + +setup( install_requires=install_requires_external + install_requires_local ) diff --git a/geos-pv/pyproject.toml b/geos-pv/pyproject.toml index 1328fedf..1b60faac 
100644 --- a/geos-pv/pyproject.toml +++ b/geos-pv/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = ["setuptools>=42", "wheel"] +requires = ["setuptools>=61.2"] build-backend = "setuptools.build_meta" [project] @@ -16,7 +16,9 @@ classifiers = [ "Programming Language :: Python" ] -requires-python = ">=3.9" +requires-python = ">=3.10" + +dynamic = ["dependencies"] dependencies = [ "typing_extensions", @@ -26,8 +28,3 @@ dependencies = [ [project.scripts] - -[tool.mypy] -python_version = "3.9" -warn_return_any = true -warn_unused_configs = true \ No newline at end of file diff --git a/geos-pv/requirements.txt b/geos-pv/requirements.txt new file mode 100644 index 00000000..4a5b598f --- /dev/null +++ b/geos-pv/requirements.txt @@ -0,0 +1,5 @@ +"geos-geomechanics" +"geos-mesh" +"geos-posp" +"geos-prep" +"geos-utils" \ No newline at end of file diff --git a/geos-pv/setup.py b/geos-pv/setup.py index 70b545a8..0a65b791 100644 --- a/geos-pv/setup.py +++ b/geos-pv/setup.py @@ -1,11 +1,16 @@ from pathlib import Path from setuptools import setup -# This is where you add any fancy path resolution to the local lib: -local_path: str = (Path(__file__).parent).as_uri() +# geos python package dependencies are read from requirements.txt +# WARNING: only local dependencies must be included in the requirements.txt -setup( - install_requires=[ - f"geos-utils @ {local_path}", - ] -) \ No newline at end of file +local_package_names = [] +with open( "./requirements.txt" ) as f: + local_package_names = f.read().splitlines() + +geos_python_packages_path: Path = Path( __file__ ).parent.parent +install_requires = [ + f"{name} @ {(geos_python_packages_path / name).as_uri()}" for name in local_package_names + if ( geos_python_packages_path / name ).exists() +] +setup( install_requires=install_requires ) diff --git a/geos-pv/src/PVplugins/PVGeosLogReader.py b/geos-pv/src/PVplugins/PVGeosLogReader.py index d7c5c0eb..09659d71 100644 --- a/geos-pv/src/PVplugins/PVGeosLogReader.py +++ 
b/geos-pv/src/PVplugins/PVGeosLogReader.py @@ -12,20 +12,16 @@ import pandas as pd # type: ignore[import-untyped] from typing_extensions import Self -dir_path = os.path.dirname(os.path.realpath(__file__)) -parent_dir_path = os.path.dirname(dir_path) +dir_path = os.path.dirname( os.path.realpath( __file__ ) ) +parent_dir_path = os.path.dirname( dir_path ) if parent_dir_path not in sys.path: - sys.path.append(parent_dir_path) + sys.path.append( parent_dir_path ) -import PVplugins #required to update sys path +import PVplugins #required to update sys path import vtkmodules.util.numpy_support as vnp from paraview.util.vtkAlgorithm import ( # type: ignore[import-not-found] - VTKPythonAlgorithmBase, - smdomain, - smhint, - smproperty, - smproxy, + VTKPythonAlgorithmBase, smdomain, smhint, smproperty, smproxy, ) from vtk import VTK_DOUBLE # type: ignore[import-untyped] from vtkmodules.vtkCommonCore import vtkDataArraySelection as vtkDAS @@ -46,8 +42,6 @@ from geos_pv.geosLogReaderUtils.GeosLogReaderFlow import GeosLogReaderFlow from geos_pv.geosLogReaderUtils.GeosLogReaderWells import GeosLogReaderWells from geos.utils.enumUnits import ( - - Mass, MassRate, Pressure, @@ -60,11 +54,9 @@ from geos.utils.UnitRepository import UnitRepository from geos_pv.utils.checkboxFunction import ( # type: ignore[attr-defined] - createModifiedCallback, -) + createModifiedCallback, ) from geos_pv.utils.paraviewTreatments import ( - strListToEnumerationDomainXml, -) + strListToEnumerationDomainXml, ) __doc__ = """ PVGeosLogReader is a Paraview plugin that allows to read Geos output log. 
@@ -88,18 +80,19 @@ @smproxy.reader( name="PVGeosLogReader", label="Geos Log Reader", - extensions=["txt", "out"], + extensions=[ "txt", "out" ], file_description="GEOS log .txt or .out files", ) -class PVGeosLogReader(VTKPythonAlgorithmBase): - def __init__(self: Self) -> None: +class PVGeosLogReader( VTKPythonAlgorithmBase ): + + def __init__( self: Self ) -> None: """Paraview reader for Geos log files ."txt" or ".out". Output is a vtkTable with data extracted from the log. """ - super().__init__(nInputPorts=0, nOutputPorts=1, outputType="vtkTable") + super().__init__( nInputPorts=0, nOutputPorts=1, outputType="vtkTable" ) self.m_filepath: str = "" - self.m_phasesUserChoice: list[str] = [] + self.m_phasesUserChoice: list[ str ] = [] self.m_dataframeChoice: int = 0 self.m_dataframe: pd.DataFrame self.m_numberWellsMean: int = 1 @@ -118,8 +111,8 @@ def __init__(self: Self) -> None: # for selection of properties self.m_propertiesFlow: vtkDAS = vtkDAS() - self.m_propertiesFlow.AddObserver("ModifiedEvent", createModifiedCallback(self)) # type: ignore[arg-type] - propsFlow: list[str] = [ + self.m_propertiesFlow.AddObserver( "ModifiedEvent", createModifiedCallback( self ) ) # type: ignore[arg-type] + propsFlow: list[ str ] = [ "DeltaPressure", "Pressure", "Temperature", @@ -134,11 +127,11 @@ def __init__(self: Self) -> None: "CellFluidMass", ] for prop in propsFlow: - self.m_propertiesFlow.AddArray(prop) + self.m_propertiesFlow.AddArray( prop ) self.m_propertiesWells: vtkDAS = vtkDAS() - self.m_propertiesWells.AddObserver("ModifiedEvent", createModifiedCallback(self)) # type: ignore[arg-type] - propsWells: list[str] = [ + self.m_propertiesWells.AddObserver( "ModifiedEvent", createModifiedCallback( self ) ) # type: ignore[arg-type] + propsWells: list[ str ] = [ "MeanBHP", "MeanTotalMassRate", "MeanTotalVolumetricRate", @@ -150,31 +143,30 @@ def __init__(self: Self) -> None: "BHP", ] for prop in propsWells: - self.m_propertiesWells.AddArray(prop) + 
self.m_propertiesWells.AddArray( prop ) self.m_propertiesAquifers: vtkDAS = vtkDAS() - self.m_propertiesAquifers.AddObserver("ModifiedEvent", createModifiedCallback(self)) # type: ignore[arg-type] - propsAquifers: list[str] = [ + self.m_propertiesAquifers.AddObserver( "ModifiedEvent", + createModifiedCallback( self ) ) # type: ignore[arg-type] + propsAquifers: list[ str ] = [ "Volume", "VolumetricRate", "CumulatedVolume", "CumulatedVolumetricRate", ] for prop in propsAquifers: - self.m_propertiesAquifers.AddArray(prop) + self.m_propertiesAquifers.AddArray( prop ) self.m_convergence: vtkDAS = vtkDAS() - self.m_convergence.AddObserver("ModifiedEvent", createModifiedCallback(self)) # type: ignore[arg-type] - propsSolvers: list[str] = ["NewtonIter", "LinearIter"] + self.m_convergence.AddObserver( "ModifiedEvent", createModifiedCallback( self ) ) # type: ignore[arg-type] + propsSolvers: list[ str ] = [ "NewtonIter", "LinearIter" ] for prop in propsSolvers: - self.m_convergence.AddArray(prop) + self.m_convergence.AddArray( prop ) - @smproperty.stringvector( - name="DataFilepath", default_values="Enter a filepath to your data" - ) + @smproperty.stringvector( name="DataFilepath", default_values="Enter a filepath to your data" ) @smdomain.filelist() - @smhint.filechooser(extensions=["txt", "out"], file_description="Data files") - def a01SetFilepath(self: Self, filepath: str) -> None: + @smhint.filechooser( extensions=[ "txt", "out" ], file_description="Data files" ) + def a01SetFilepath( self: Self, filepath: str ) -> None: """Set Geos log file path. Args: @@ -184,13 +176,13 @@ def a01SetFilepath(self: Self, filepath: str) -> None: FileNotFoundError: file not found. 
""" if filepath != "Enter a filepath to your data": - if not os.path.exists(filepath): - raise FileNotFoundError(f"Invalid filepath {filepath}") + if not os.path.exists( filepath ): + raise FileNotFoundError( f"Invalid filepath {filepath}" ) else: self.m_filepath = filepath self.Modified() - def getFilepath(self: Self) -> str: + def getFilepath( self: Self ) -> str: """Get Geos log file path. Returns: @@ -198,24 +190,20 @@ def getFilepath(self: Self) -> str: """ return self.m_filepath - @smproperty.stringvector( - name="EnterPhaseNames", label="Enter Phase Names", default_values="" - ) - @smdomain.xml( - """ + @smproperty.stringvector( name="EnterPhaseNames", label="Enter Phase Names", default_values="" ) + @smdomain.xml( """ Please enter the phase names as in the form: phase0, phase1, phase2 - """ - ) - def a02SetPhaseNames(self: Self, value: str) -> None: + """ ) + def a02SetPhaseNames( self: Self, value: str ) -> None: """Set phase names. Args: value (str): list of phase names separated by space. """ - self.m_phasesUserChoice = transformUserChoiceToListPhases(value) + self.m_phasesUserChoice = transformUserChoiceToListPhases( value ) self.Modified() - def getPhasesUserChoice(self: Self) -> list[str]: + def getPhasesUserChoice( self: Self ) -> list[ str ]: """Access the phases from the user input. Returns: @@ -229,10 +217,8 @@ def getPhasesUserChoice(self: Self) -> list[str]: label="DataframeChoice", default_values=0, ) - @smdomain.xml( - strListToEnumerationDomainXml(["Flow", "Wells", "Aquifers", "Convergence"]) - ) - def a03SetDataFrameChoice(self: Self, value: int) -> None: + @smdomain.xml( strListToEnumerationDomainXml( [ "Flow", "Wells", "Aquifers", "Convergence" ] ) ) + def a03SetDataFrameChoice( self: Self, value: int ) -> None: """Set reader choice: 0:Flow, 1:Wells, 2:Aquifers, 3:Convergence. 
Args: @@ -241,7 +227,7 @@ def a03SetDataFrameChoice(self: Self, value: int) -> None: self.m_dataframeChoice = value self.Modified() - def getDataframeChoice(self: Self) -> int: + def getDataframeChoice( self: Self ) -> int: """Accesses the choice of dataframe from the user. Returns: @@ -252,40 +238,36 @@ def getDataframeChoice(self: Self) -> int: """ return self.m_dataframeChoice - @smproperty.xml( - """ + @smproperty.xml( """ - """ - ) - def a04PropertyGroup(self: Self) -> None: + """ ) + def a04PropertyGroup( self: Self ) -> None: """Organized group.""" self.Modified() - @smproperty.dataarrayselection(name="FlowProperties") - def a05SetPropertiesFlow(self: Self) -> vtkDAS: + @smproperty.dataarrayselection( name="FlowProperties" ) + def a05SetPropertiesFlow( self: Self ) -> vtkDAS: """Use Flow.""" return self.m_propertiesFlow - @smproperty.xml( - """ + @smproperty.xml( """ - """ - ) - def a06GroupFlow(self: Self) -> None: + """ ) + def a06GroupFlow( self: Self ) -> None: """Organized group.""" self.Modified() - @smproperty.dataarrayselection(name="WellsProperties") - def a07SetPropertiesWells(self: Self) -> vtkDAS: + @smproperty.dataarrayselection( name="WellsProperties" ) + def a07SetPropertiesWells( self: Self ) -> vtkDAS: """Use wells.""" return self.m_propertiesWells - @smproperty.intvector(name="NumberOfWellsForMeanCalculation", default_values=1) - def a08SetTheNumberOfWellsMean(self: Self, number: int) -> None: + @smproperty.intvector( name="NumberOfWellsForMeanCalculation", default_values=1 ) + def a08SetTheNumberOfWellsMean( self: Self, number: int ) -> None: """Set number of wells. Args: @@ -294,7 +276,7 @@ def a08SetTheNumberOfWellsMean(self: Self, number: int) -> None: self.m_numberWellsMean = number self.Modified() - def getNumberOfWellsMean(self: Self) -> int: + def getNumberOfWellsMean( self: Self ) -> int: """Get the number of wells. 
Returns: @@ -302,51 +284,45 @@ def getNumberOfWellsMean(self: Self) -> int: """ return self.m_numberWellsMean - @smproperty.xml( - """ + @smproperty.xml( """ - """ - ) - def a09GroupWells(self: Self) -> None: + """ ) + def a09GroupWells( self: Self ) -> None: """Organized group.""" self.Modified() - @smproperty.dataarrayselection(name="AquifersProperties") - def a10SetPropertiesAquifers(self: Self) -> vtkDAS: + @smproperty.dataarrayselection( name="AquifersProperties" ) + def a10SetPropertiesAquifers( self: Self ) -> vtkDAS: """Use aquifers.""" return self.m_propertiesAquifers - @smproperty.xml( - """ + @smproperty.xml( """ - """ - ) - def a11GroupAquifers(self: Self) -> None: + """ ) + def a11GroupAquifers( self: Self ) -> None: """Organized group.""" self.Modified() - @smproperty.dataarrayselection(name="Convergence") - def a12SetConvergence(self: Self) -> vtkDAS: + @smproperty.dataarrayselection( name="Convergence" ) + def a12SetConvergence( self: Self ) -> vtkDAS: """Use convergence.""" return self.m_convergence - @smproperty.xml( - """ + @smproperty.xml( """ - """ - ) - def a13GroupSolvers(self: Self) -> None: + """ ) + def a13GroupSolvers( self: Self ) -> None: """Organized group.""" self.Modified() - def getIdsToUse(self: Self) -> list[str]: + def getIdsToUse( self: Self ) -> list[ str ]: """Get property ids. Using the checkbox choices of the user for metaproperties, @@ -356,28 +332,28 @@ def getIdsToUse(self: Self) -> list[str]: Returns: list(str): Ids of the metaproperties. 
""" - dataArrays: dict[int, vtkDAS] = { + dataArrays: dict[ int, vtkDAS ] = { 0: self.m_propertiesFlow, 1: self.m_propertiesWells, 2: self.m_propertiesAquifers, 3: self.m_convergence, } - dataArrayToUse = dataArrays[self.getDataframeChoice()] - propertyNames: list[str] = [] - for i in range(dataArrayToUse.GetNumberOfArrays()): - propName: str = dataArrayToUse.GetArrayName(i) - if dataArrayToUse.ArrayIsEnabled(propName) == 1: - propertyNames.append(propName) - propertiesWithId: list[str] = identifyProperties(propertyNames) - onlyIds: list[str] = [] + dataArrayToUse = dataArrays[ self.getDataframeChoice() ] + propertyNames: list[ str ] = [] + for i in range( dataArrayToUse.GetNumberOfArrays() ): + propName: str = dataArrayToUse.GetArrayName( i ) + if dataArrayToUse.ArrayIsEnabled( propName ) == 1: + propertyNames.append( propName ) + propertiesWithId: list[ str ] = identifyProperties( propertyNames ) + onlyIds: list[ str ] = [] for propId in propertiesWithId: - idFound: str = propId.split(":")[0] - onlyIds.append(idFound) + idFound: str = propId.split( ":" )[ 0 ] + onlyIds.append( idFound ) return onlyIds - @smproperty.intvector(name="UseSIUnits", label="UseSIUnits", default_values=1) - @smdomain.xml("""""") - def b01SetUseSIUnits(self: Self, value: int) -> None: + @smproperty.intvector( name="UseSIUnits", label="UseSIUnits", default_values=1 ) + @smdomain.xml( """""" ) + def b01SetUseSIUnits( self: Self, value: int ) -> None: """Set Use SI Units. 
Args: @@ -386,11 +362,9 @@ def b01SetUseSIUnits(self: Self, value: int) -> None: self.m_useSIUnits = value self.Modified() - @smproperty.intvector( - name="Pressure", label="Pressure", default_values=0, panel_visibility="default" - ) - @smdomain.xml(enumerationDomainUnit(cast(Enum, Pressure))) - def b02SetPressureUnit(self: Self, value: int) -> None: + @smproperty.intvector( name="Pressure", label="Pressure", default_values=0, panel_visibility="default" ) + @smdomain.xml( enumerationDomainUnit( cast( Enum, Pressure ) ) ) + def b02SetPressureUnit( self: Self, value: int ) -> None: """Set pressure unit. Args: @@ -399,11 +373,9 @@ def b02SetPressureUnit(self: Self, value: int) -> None: self.m_pressureUnit = value self.Modified() - @smproperty.intvector( - name="BHP", label="BHP", default_values=0, panel_visibility="default" - ) - @smdomain.xml(enumerationDomainUnit(cast(Enum, Pressure))) - def b03SetBHPUnit(self: Self, value: int) -> None: + @smproperty.intvector( name="BHP", label="BHP", default_values=0, panel_visibility="default" ) + @smdomain.xml( enumerationDomainUnit( cast( Enum, Pressure ) ) ) + def b03SetBHPUnit( self: Self, value: int ) -> None: """Set BHP unit. Args: @@ -412,11 +384,9 @@ def b03SetBHPUnit(self: Self, value: int) -> None: self.m_bhpUnit = value self.Modified() - @smproperty.intvector( - name="Time", label="Time", default_values=0, panel_visibility="default" - ) - @smdomain.xml(enumerationDomainUnit(cast(Enum, Time))) - def b04SetTimeUnit(self: Self, value: int) -> None: + @smproperty.intvector( name="Time", label="Time", default_values=0, panel_visibility="default" ) + @smdomain.xml( enumerationDomainUnit( cast( Enum, Time ) ) ) + def b04SetTimeUnit( self: Self, value: int ) -> None: """Set time unit. 
Args: @@ -425,11 +395,9 @@ def b04SetTimeUnit(self: Self, value: int) -> None: self.m_timeUnit = value self.Modified() - @smproperty.intvector( - name="Mass", label="Mass", default_values=0, panel_visibility="default" - ) - @smdomain.xml(enumerationDomainUnit(cast(Enum, Mass))) - def b05SetMassUnit(self: Self, value: int) -> None: + @smproperty.intvector( name="Mass", label="Mass", default_values=0, panel_visibility="default" ) + @smdomain.xml( enumerationDomainUnit( cast( Enum, Mass ) ) ) + def b05SetMassUnit( self: Self, value: int ) -> None: """Set mass unit. Args: @@ -438,11 +406,9 @@ def b05SetMassUnit(self: Self, value: int) -> None: self.m_massUnit = value self.Modified() - @smproperty.intvector( - name="Volume", label="Volume", default_values=0, panel_visibility="default" - ) - @smdomain.xml(enumerationDomainUnit(cast(Enum, Volume))) - def b06SetVolumeUnit(self: Self, value: int) -> None: + @smproperty.intvector( name="Volume", label="Volume", default_values=0, panel_visibility="default" ) + @smdomain.xml( enumerationDomainUnit( cast( Enum, Volume ) ) ) + def b06SetVolumeUnit( self: Self, value: int ) -> None: """Set volume unit. Args: @@ -457,8 +423,8 @@ def b06SetVolumeUnit(self: Self, value: int) -> None: default_values=0, panel_visibility="default", ) - @smdomain.xml(enumerationDomainUnit(cast(Enum, VolumetricRate))) - def b07SetVolumetricRateUnit(self: Self, value: int) -> None: + @smdomain.xml( enumerationDomainUnit( cast( Enum, VolumetricRate ) ) ) + def b07SetVolumetricRateUnit( self: Self, value: int ) -> None: """Set volumetric rate unit. 
Args: @@ -467,11 +433,9 @@ def b07SetVolumetricRateUnit(self: Self, value: int) -> None: self.m_volumetricRateUnit = value self.Modified() - @smproperty.intvector( - name="MassRate", label="MassRate", default_values=0, panel_visibility="default" - ) - @smdomain.xml(enumerationDomainUnit(cast(Enum, MassRate))) - def b08SetMassRateUnit(self: Self, value: int) -> None: + @smproperty.intvector( name="MassRate", label="MassRate", default_values=0, panel_visibility="default" ) + @smdomain.xml( enumerationDomainUnit( cast( Enum, MassRate ) ) ) + def b08SetMassRateUnit( self: Self, value: int ) -> None: """Set Mass rate unit. Args: @@ -481,8 +445,7 @@ def b08SetMassRateUnit(self: Self, value: int) -> None: self.m_massRateUnit = value self.Modified() - @smproperty.xml( - """ + @smproperty.xml( """ @@ -492,13 +455,12 @@ def b08SetMassRateUnit(self: Self, value: int) -> None: - """ - ) - def b09GroupUnitsToUse(self: Self) -> None: + """ ) + def b09GroupUnitsToUse( self: Self ) -> None: """Organize group.""" self.Modified() - def getUseSIUnits(self: Self) -> int: + def getUseSIUnits( self: Self ) -> int: """Acess the choice to use SI units or not. Returns: @@ -506,7 +468,7 @@ def getUseSIUnits(self: Self) -> int: """ return self.m_useSIUnits - def getUnitChoices(self: Self) -> dict[str, int]: + def getUnitChoices( self: Self ) -> dict[ str, int ]: """Get the units choosen by the user. Based on the choice of using SI units or not, and if @@ -518,7 +480,7 @@ def getUnitChoices(self: Self) -> dict[str, int]: dict[str, int]: empty dictionary if use SI unit, or property name as keys and unit choice as values. 
""" - unitChoices: dict[str, int] = {} + unitChoices: dict[ str, int ] = {} if not self.getUseSIUnits(): unitChoices = { "pressure": self.m_pressureUnit, @@ -533,18 +495,18 @@ def getUnitChoices(self: Self) -> dict[str, int]: } return unitChoices - def createDataframe(self: Self) -> pd.DataFrame: + def createDataframe( self: Self ) -> pd.DataFrame: """Create dataframe with values from Geos log based on user choices. Returns: pd.DataFrame: Dataframe with log values according to user choice. """ filepath: str = self.getFilepath() - phaseNames: list[str] = self.getPhasesUserChoice() + phaseNames: list[ str ] = self.getPhasesUserChoice() choice: int = self.getDataframeChoice() - userPropertiesUnits: dict[str, int] = self.getUnitChoices() - unitObj: UnitRepository = UnitRepository(userPropertiesUnits) - propertiesUnit: dict[str, Unit] = unitObj.getPropertiesUnit() + userPropertiesUnits: dict[ str, int ] = self.getUnitChoices() + unitObj: UnitRepository = UnitRepository( userPropertiesUnits ) + propertiesUnit: dict[ str, Unit ] = unitObj.getPropertiesUnit() reader: Union[ GeosLogReaderFlow, GeosLogReaderWells, @@ -552,20 +514,20 @@ def createDataframe(self: Self) -> pd.DataFrame: GeosLogReaderConvergence, ] if choice == 0: - reader = GeosLogReaderFlow(filepath, propertiesUnit, phaseNames) + reader = GeosLogReaderFlow( filepath, propertiesUnit, phaseNames ) elif choice == 1: nbrWells: int = self.getNumberOfWellsMean() - reader = GeosLogReaderWells(filepath, propertiesUnit, phaseNames, nbrWells) + reader = GeosLogReaderWells( filepath, propertiesUnit, phaseNames, nbrWells ) elif choice == 2: - reader = GeosLogReaderAquifers(filepath, propertiesUnit) + reader = GeosLogReaderAquifers( filepath, propertiesUnit ) elif choice == 3: - reader = GeosLogReaderConvergence(filepath, propertiesUnit) + reader = GeosLogReaderConvergence( filepath, propertiesUnit ) return reader.createDataframe() def RequestInformation( self: Self, request: vtkInformation, # noqa: F841 - inInfoVec: 
list[vtkInformationVector], # noqa: F841 + inInfoVec: list[ vtkInformationVector ], # noqa: F841 outInfoVec: vtkInformationVector, ) -> int: """Inherited from VTKPythonAlgorithmBase::RequestInformation. @@ -579,15 +541,15 @@ def RequestInformation( int: 1 if calculation successfully ended, 0 otherwise. """ executive = self.GetExecutive() - outInfo = outInfoVec.GetInformationObject(0) - outInfo.Remove(executive.TIME_STEPS()) - outInfo.Remove(executive.TIME_RANGE()) + outInfo = outInfoVec.GetInformationObject( 0 ) + outInfo.Remove( executive.TIME_STEPS() ) + outInfo.Remove( executive.TIME_RANGE() ) return 1 def RequestData( self: Self, request: vtkInformation, # noqa: F841 - inInfoVec: list[vtkInformationVector], # noqa: F841 + inInfoVec: list[ vtkInformationVector ], # noqa: F841 outInfoVec: vtkInformationVector, ) -> int: """Inherited from VTKPythonAlgorithmBase::RequestData. @@ -605,26 +567,27 @@ def RequestData( idsToUse = self.getIdsToUse() dataframe = self.createDataframe() usefulColumns = [] - for column_name in list(dataframe.columns): + for column_name in list( dataframe.columns ): if ":" not in column_name: - usefulColumns.append(column_name) + usefulColumns.append( column_name ) else: - idFound = column_name.split(":")[0] + idFound = column_name.split( ":" )[ 0 ] if idFound in idsToUse: - usefulColumns.append(column_name) + usefulColumns.append( column_name ) # we build the output vtkTable - output: vtkTable = vtkTable.GetData(outInfoVec, 0) + output: vtkTable = vtkTable.GetData( outInfoVec, 0 ) for column in usefulColumns: - pandas_series: pd.Series = dataframe[column] - array: npt.NDArray[np.float64] = pandas_series.values + pandas_series: pd.Series = dataframe[ column ] + array: npt.NDArray[ np.float64 ] = pandas_series.values if ":" in column: - column = column.split(":")[1] + column = column.split( ":" )[ 1 ] - newAttr: vtkDoubleArray = vnp.numpy_to_vtk(array, deep=True, array_type=VTK_DOUBLE) # type: ignore[no-untyped-call] - newAttr.SetName(column) 
- output.AddColumn(newAttr) + newAttr: vtkDoubleArray = vnp.numpy_to_vtk( array, deep=True, + array_type=VTK_DOUBLE ) # type: ignore[no-untyped-call] + newAttr.SetName( column ) + output.AddColumn( newAttr ) except Exception as e: - print("Error while reading Geos log file:") - print(str(e)) + print( "Error while reading Geos log file:" ) + print( str( e ) ) return 0 return 1 diff --git a/geos-pv/src/PVplugins/__init__.py b/geos-pv/src/PVplugins/__init__.py new file mode 100644 index 00000000..c646673a --- /dev/null +++ b/geos-pv/src/PVplugins/__init__.py @@ -0,0 +1,15 @@ +import os +import sys + +# Add other packages path to sys path +dir_path = os.path.dirname( os.path.realpath( __file__ ) ) +python_root = '../../..' + +python_modules = [ "geos-pv" ] +with open( "./requirements.txt" ) as f: + python_modules += f.read().splitlines() + +for m in python_modules: + m_path = os.path.abspath( os.path.join( dir_path, python_root, m, 'src' ) ) + if m_path not in sys.path: + sys.path.insert( 0, m_path ) diff --git a/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderAquifers.py b/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderAquifers.py index 2fae08d4..32995f2b 100644 --- a/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderAquifers.py +++ b/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderAquifers.py @@ -11,7 +11,8 @@ class GeosLogReaderAquifers: - def __init__(self: Self, filepath: str, propertiesUnit: dict[str, Unit]) -> None: + + def __init__( self: Self, filepath: str, propertiesUnit: dict[ str, Unit ] ) -> None: """Reader for Aquifer. 
Args: @@ -19,21 +20,18 @@ def __init__(self: Self, filepath: str, propertiesUnit: dict[str, Unit]) -> None propertiesUnit ( dict[str, Unit]): unit preferences """ self.m_propertiesUnit = propertiesUnit - self.m_aquiferNames: list[str] = [] - self.m_aquifersPropertiesValues: dict[str, list[float]] = {} - self.m_timesteps: list[float] = [] - - toFindInLog: list[str] = ["_pressureInfluence_table", "Time: 0"] - if not fcts.elementsAreInLog(filepath, toFindInLog): - print( - "Invalid Geos log file. Please check that your log" - + " did not crash and contains aquifers." - ) + self.m_aquiferNames: list[ str ] = [] + self.m_aquifersPropertiesValues: dict[ str, list[ float ] ] = {} + self.m_timesteps: list[ float ] = [] + + toFindInLog: list[ str ] = [ "_pressureInfluence_table", "Time: 0" ] + if not fcts.elementsAreInLog( filepath, toFindInLog ): + print( "Invalid Geos log file. Please check that your log" + " did not crash and contains aquifers." ) else: - self.readAll(filepath) + self.readAll( filepath ) self.calculateExtraValues() - def readAquiferNames(self: Self, file: TextIOBase) -> tuple[str, int]: + def readAquiferNames( self: Self, file: TextIOBase ) -> tuple[ str, int ]: """Initialize the m_aquiferNames attribute by reading log file. Args: @@ -44,21 +42,19 @@ def readAquiferNames(self: Self, file: TextIOBase) -> tuple[str, int]: The id of the last line read that contained the tag "_pressureInfluence_table"., which will be the line containing the first positive timestep at 0s. 
""" - aquiferNames: list[str] = [] + aquiferNames: list[ str ] = [] line: str = file.readline() id_line = 1 - while not line.startswith("Time: 0"): + while not line.startswith( "Time: 0" ): if "_pressureInfluence_table" in line: - aquiferName: str = fcts.extractAquifer(line) - aquiferNames.append(aquiferName) + aquiferName: str = fcts.extractAquifer( line ) + aquiferNames.append( aquiferName ) line = file.readline() id_line += 1 self.m_aquiferNames = aquiferNames - return (line, id_line) + return ( line, id_line ) - def readPropertiesValues( - self: Self, file: TextIOBase, line: str, id_line: int, total_lines: int - ) -> None: + def readPropertiesValues( self: Self, file: TextIOBase, line: str, id_line: int, total_lines: int ) -> None: """Read aquifer property values from geos log file. Initialize the m_aquifersPropertiesValues and m_timesteps attributes by reading @@ -72,108 +68,100 @@ def readPropertiesValues( id_line (int): The id of the last line read in readPhaseNames. total_lines (int): The number of lines in the file. 
""" - aquifsPropertiesValues: dict[str, list[float]] = {} + aquifsPropertiesValues: dict[ str, list[ float ] ] = {} for aquifName in self.m_aquiferNames: propVolume: str = aquifName + "__Volume" - propVolumeId: str = fcts.identifyProperties([propVolume])[0] + propVolumeId: str = fcts.identifyProperties( [ propVolume ] )[ 0 ] propRate: str = aquifName + "__VolumetricRate" - propRateId: str = fcts.identifyProperties([propRate])[0] - aquifsPropertiesValues[propVolumeId] = [0.0] - aquifsPropertiesValues[propRateId] = [0.0] - newTimestep, currentDT = fcts.extractTimeAndDt(line) - timesteps: list[float] = [newTimestep] + propRateId: str = fcts.identifyProperties( [ propRate ] )[ 0 ] + aquifsPropertiesValues[ propVolumeId ] = [ 0.0 ] + aquifsPropertiesValues[ propRateId ] = [ 0.0 ] + newTimestep, currentDT = fcts.extractTimeAndDt( line ) + timesteps: list[ float ] = [ newTimestep ] line = file.readline() id_line += 1 while id_line <= total_lines: - if line.startswith("Time:"): - newTimestep, currentDT = fcts.extractTimeAndDt(line) - newTimestep = fcts.convertValues( - ["Time"], [newTimestep], self.m_propertiesUnit - )[0] + if line.startswith( "Time:" ): + newTimestep, currentDT = fcts.extractTimeAndDt( line ) + newTimestep = fcts.convertValues( [ "Time" ], [ newTimestep ], self.m_propertiesUnit )[ 0 ] if " produces a flux of " in line: - if newTimestep not in timesteps and newTimestep > max( - timesteps, default=0.0 - ): - timesteps.append(newTimestep) + if newTimestep not in timesteps and newTimestep > max( timesteps, default=0.0 ): + timesteps.append( newTimestep ) for key in aquifsPropertiesValues: - aquifsPropertiesValues[key].append(0.0) - aquifName, volume = fcts.extractValueAndNameAquifer(line) + aquifsPropertiesValues[ key ].append( 0.0 ) + aquifName, volume = fcts.extractValueAndNameAquifer( line ) rate: float = volume / currentDT propVol: str = aquifName + "__Volume" - propVolId: str = fcts.identifyProperties([propVol])[0] + propVolId: str = 
fcts.identifyProperties( [ propVol ] )[ 0 ] propRate = aquifName + "__VolumetricRate" - propRateId = fcts.identifyProperties([propRate])[0] - aquifsPropertiesValues[propVolId][-1] = fcts.convertValues( - [propVol], [volume], self.m_propertiesUnit - )[0] - aquifsPropertiesValues[propRateId][-1] = fcts.convertValues( - [propRate], [rate], self.m_propertiesUnit - )[0] + propRateId = fcts.identifyProperties( [ propRate ] )[ 0 ] + aquifsPropertiesValues[ propVolId ][ -1 ] = fcts.convertValues( [ propVol ], [ volume ], + self.m_propertiesUnit )[ 0 ] + aquifsPropertiesValues[ propRateId ][ -1 ] = fcts.convertValues( [ propRate ], [ rate ], + self.m_propertiesUnit )[ 0 ] line = file.readline() id_line += 1 self.m_aquifersPropertiesValues = aquifsPropertiesValues self.m_timesteps = timesteps - def readAll(self: Self, filepath: str) -> None: + def readAll( self: Self, filepath: str ) -> None: """Initialize all the attributes of the class by reading a Geos log file. Args: filepath (str): Geos log filepath. 
""" - with open(filepath) as geosFile: - total_lines: int = fcts.countNumberLines(filepath) - line, id_line = self.readAquiferNames(geosFile) - self.readPropertiesValues(geosFile, line, id_line, total_lines) + with open( filepath ) as geosFile: + total_lines: int = fcts.countNumberLines( filepath ) + line, id_line = self.readAquiferNames( geosFile ) + self.readPropertiesValues( geosFile, line, id_line, total_lines ) - def calculateExtraValues(self: Self) -> None: + def calculateExtraValues( self: Self ) -> None: """Add cumulated columns for each aquifer volume and aquifer rate.""" for aquifName in self.m_aquiferNames: propVolume: str = aquifName + "__Volume" - propVolumeId: str = fcts.identifyProperties([propVolume])[0] + propVolumeId: str = fcts.identifyProperties( [ propVolume ] )[ 0 ] propRate: str = aquifName + "__VolumetricRate" - propRateId: str = fcts.identifyProperties([propRate])[0] - volumes: list[float] = self.m_aquifersPropertiesValues[propVolumeId] - rates: list[float] = self.m_aquifersPropertiesValues[propRateId] + propRateId: str = fcts.identifyProperties( [ propRate ] )[ 0 ] + volumes: list[ float ] = self.m_aquifersPropertiesValues[ propVolumeId ] + rates: list[ float ] = self.m_aquifersPropertiesValues[ propRateId ] cumuVol_name = aquifName + "__CumulatedVolume" - cumuVolId: str = fcts.identifyProperties([cumuVol_name])[0] + cumuVolId: str = fcts.identifyProperties( [ cumuVol_name ] )[ 0 ] cumuRate_name = aquifName + "__CumulatedVolumetricRate" - cumuRateId: str = fcts.identifyProperties([cumuRate_name])[0] - cumuVol_values: list[float] = [volumes[0]] - cumuRate_values: list[float] = [rates[0]] - for i in range(1, len(volumes)): - cumuVol_values.append(cumuVol_values[i - 1] + volumes[i]) - cumuRate_values.append(cumuRate_values[i - 1] + rates[i]) - self.m_aquifersPropertiesValues[cumuVolId] = cumuVol_values - self.m_aquifersPropertiesValues[cumuRateId] = cumuRate_values - - def createDataframe(self: Self) -> pd.DataFrame: + cumuRateId: str = 
fcts.identifyProperties( [ cumuRate_name ] )[ 0 ] + cumuVol_values: list[ float ] = [ volumes[ 0 ] ] + cumuRate_values: list[ float ] = [ rates[ 0 ] ] + for i in range( 1, len( volumes ) ): + cumuVol_values.append( cumuVol_values[ i - 1 ] + volumes[ i ] ) + cumuRate_values.append( cumuRate_values[ i - 1 ] + rates[ i ] ) + self.m_aquifersPropertiesValues[ cumuVolId ] = cumuVol_values + self.m_aquifersPropertiesValues[ cumuRateId ] = cumuRate_values + + def createDataframe( self: Self ) -> pd.DataFrame: """Create and fill and return dataframeAquifers. Returns: pd.DataFrame: dataframe with values from Geos log. """ try: - colNames: list[str] = [] - colValues: list[float] = [] + colNames: list[ str ] = [] + colValues: list[ float ] = [] for propName, values in self.m_aquifersPropertiesValues.items(): - unitObj: Unit = self.m_propertiesUnit["nounit"] + unitObj: Unit = self.m_propertiesUnit[ "nounit" ] for propertyType in self.m_propertiesUnit: if propertyType.lower() in propName.lower(): - unitObj = self.m_propertiesUnit[propertyType] + unitObj = self.m_propertiesUnit[ propertyType ] break if unitObj.unitLabel == "": - raise ValueError( - "No unit was found for this property name <<" + propName + ">>." - ) + raise ValueError( "No unit was found for this property name <<" + propName + ">>." 
) columnName: str = propName + "__" + unitObj.unitLabel - colNames.append(columnName) - colValues.append(values) # type: ignore[arg-type] - timeUnit: Unit = self.m_propertiesUnit["time"] + colNames.append( columnName ) + colValues.append( values ) # type: ignore[arg-type] + timeUnit: Unit = self.m_propertiesUnit[ "time" ] timeName: str = "Time__" + timeUnit.unitLabel - colNames.append(timeName) - colValues.append(self.m_timesteps) # type: ignore[arg-type] - data = {colNames[i]: colValues[i] for i in range(len(colNames))} - dataframeAquifers: pd.DataFrame = pd.DataFrame(data) + colNames.append( timeName ) + colValues.append( self.m_timesteps ) # type: ignore[arg-type] + data = { colNames[ i ]: colValues[ i ] for i in range( len( colNames ) ) } + dataframeAquifers: pd.DataFrame = pd.DataFrame( data ) return dataframeAquifers except ValueError as err: - print(err.args[0]) + print( err.args[ 0 ] ) diff --git a/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderConvergence.py b/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderConvergence.py index 9dd1bb85..79fa6d51 100644 --- a/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderConvergence.py +++ b/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderConvergence.py @@ -11,26 +11,27 @@ class GeosLogReaderConvergence: - def __init__(self: Self, filepath: str, propertiesUnit: dict[str, Unit]) -> None: + + def __init__( self: Self, filepath: str, propertiesUnit: dict[ str, Unit ] ) -> None: """Reader for Convergence information. Args: filepath (str): path to geos log file. 
propertiesUnit ( dict[str, Unit]): unit preferences """ - self.m_propertiesUnit: dict[str, Unit] = propertiesUnit - self.m_solversIterationsValues: dict[str, list[float]] = {} - self.m_timesteps: list[float] = [] - self.m_dts: list[float] = [] + self.m_propertiesUnit: dict[ str, Unit ] = propertiesUnit + self.m_solversIterationsValues: dict[ str, list[ float ] ] = {} + self.m_timesteps: list[ float ] = [] + self.m_dts: list[ float ] = [] - toFindInLog: list[str] = ["Time:"] - if not fcts.elementsAreInLog(filepath, toFindInLog): - print("Invalid Geos log file. Please check that your log did not crash.") + toFindInLog: list[ str ] = [ "Time:" ] + if not fcts.elementsAreInLog( filepath, toFindInLog ): + print( "Invalid Geos log file. Please check that your log did not crash." ) else: - self.readAll(filepath) + self.readAll( filepath ) self.calculateExtraValues() - def readIterationsValues(self: Self, file: TextIOBase, total_lines: int) -> None: + def readIterationsValues( self: Self, file: TextIOBase, total_lines: int ) -> None: """Read iteration values from Geos log file. Initialize the m_aquifersPropertiesValues and m_timesteps attributes @@ -42,90 +43,83 @@ def readIterationsValues(self: Self, file: TextIOBase, total_lines: int) -> None file (TextIOBase): Geos Log file total_lines (int): The number of lines in the file. 
""" - newtonIterId, linearIterId = fcts.identifyProperties( - ["NewtonIter", "LinearIter"] - ) - iterationsValues: dict[str, list[float]] = {newtonIterId: [], linearIterId: []} - timesteps: list[float] = [] - dts: list[float] = [] + newtonIterId, linearIterId = fcts.identifyProperties( [ "NewtonIter", "LinearIter" ] ) + iterationsValues: dict[ str, list[ float ] ] = { newtonIterId: [], linearIterId: [] } + timesteps: list[ float ] = [] + dts: list[ float ] = [] line: str = file.readline() id_line = 1 - while not line.startswith("Time:"): + while not line.startswith( "Time:" ): line = file.readline() id_line += 1 while id_line <= total_lines: - if line.startswith("Time:"): - timestep, dt = fcts.extractTimeAndDt(line) - timestep, dt = fcts.convertValues( - ["Time", "Time"], [timestep, dt], self.m_propertiesUnit - ) - if timestep > max(timesteps, default=-9.9e99): - timesteps.append(timestep) - dts.append(dt) - iterationsValues[newtonIterId].append(0.0) - iterationsValues[linearIterId].append(0.0) + if line.startswith( "Time:" ): + timestep, dt = fcts.extractTimeAndDt( line ) + timestep, dt = fcts.convertValues( [ "Time", "Time" ], [ timestep, dt ], self.m_propertiesUnit ) + if timestep > max( timesteps, default=-9.9e99 ): + timesteps.append( timestep ) + dts.append( dt ) + iterationsValues[ newtonIterId ].append( 0.0 ) + iterationsValues[ linearIterId ].append( 0.0 ) elif "NewtonIter:" in line: - newtonIter: int = fcts.extractNewtonIter(line) + newtonIter: int = fcts.extractNewtonIter( line ) if newtonIter > 0: - iterationsValues[newtonIterId][-1] += 1.0 + iterationsValues[ newtonIterId ][ -1 ] += 1.0 elif "Linear Solver" in line: - linearIter: int = fcts.extractLinearIter(line) - iterationsValues[linearIterId][-1] += linearIter + linearIter: int = fcts.extractLinearIter( line ) + iterationsValues[ linearIterId ][ -1 ] += linearIter line = file.readline() id_line += 1 self.m_solversIterationsValues = iterationsValues self.m_timesteps = timesteps self.m_dts = dts - 
def readAll(self: Self, filepath: str) -> None: + def readAll( self: Self, filepath: str ) -> None: """Initialize all the attributes of the class by reading a Geos log file. Args: filepath (str): Geos log filepath. """ - with open(filepath) as geosFile: - total_lines: int = fcts.countNumberLines(filepath) - self.readIterationsValues(geosFile, total_lines) + with open( filepath ) as geosFile: + total_lines: int = fcts.countNumberLines( filepath ) + self.readIterationsValues( geosFile, total_lines ) - def calculateExtraValues(self: Self) -> None: + def calculateExtraValues( self: Self ) -> None: """Add cumulated columns for newtonIter and linearIter.""" - siv: dict[str, list[float]] = self.m_solversIterationsValues + siv: dict[ str, list[ float ] ] = self.m_solversIterationsValues cumulatedNewtonIter, cumulatedLinearIter = fcts.identifyProperties( - ["CumulatedNewtonIter", "CumulatedLinearIter"] - ) - siv[cumulatedNewtonIter] = [] - siv[cumulatedLinearIter] = [] - newtonIterId, linearIterId = fcts.identifyProperties( - ["NewtonIter", "LinearIter"] - ) - newtonIter: list[float] = siv[newtonIterId] - linearIter: list[float] = siv[linearIterId] + [ "CumulatedNewtonIter", "CumulatedLinearIter" ] ) + siv[ cumulatedNewtonIter ] = [] + siv[ cumulatedLinearIter ] = [] + newtonIterId, linearIterId = fcts.identifyProperties( [ "NewtonIter", "LinearIter" ] ) + newtonIter: list[ float ] = siv[ newtonIterId ] + linearIter: list[ float ] = siv[ linearIterId ] sumNewtonIter: float = 0.0 sumLinearIter: float = 0.0 - for i in range(len(newtonIter)): - sumNewtonIter += newtonIter[i] - sumLinearIter += linearIter[i] - siv[cumulatedNewtonIter].append(sumNewtonIter) - siv[cumulatedLinearIter].append(sumLinearIter) + for i in range( len( newtonIter ) ): + sumNewtonIter += newtonIter[ i ] + sumLinearIter += linearIter[ i ] + siv[ cumulatedNewtonIter ].append( sumNewtonIter ) + siv[ cumulatedLinearIter ].append( sumLinearIter ) - def createDataframe(self: Self) -> pd.DataFrame: + def 
createDataframe( self: Self ) -> pd.DataFrame: """Create and fill and return dataframeSolversIterations. Returns: pd.DataFrame: dataframe with values from Geos log. """ - colNames: list[str] = [] - colValues: list[float] = [] + colNames: list[ str ] = [] + colValues: list[ float ] = [] for propName, values in self.m_solversIterationsValues.items(): - colNames.append(propName) - colValues.append(values) # type: ignore[arg-type] - timeUnit: str = self.m_propertiesUnit["time"].unitLabel + colNames.append( propName ) + colValues.append( values ) # type: ignore[arg-type] + timeUnit: str = self.m_propertiesUnit[ "time" ].unitLabel timeName: str = "Time__" + timeUnit dtName: str = "dt__" + timeUnit - colNames.append(timeName) - colNames.append(dtName) - colValues.append(self.m_timesteps) # type: ignore[arg-type] - colValues.append(self.m_dts) # type: ignore[arg-type] - data = {colNames[i]: colValues[i] for i in range(len(colNames))} - dataframeSolversIterations: pd.DataFrame = pd.DataFrame(data) + colNames.append( timeName ) + colNames.append( dtName ) + colValues.append( self.m_timesteps ) # type: ignore[arg-type] + colValues.append( self.m_dts ) # type: ignore[arg-type] + data = { colNames[ i ]: colValues[ i ] for i in range( len( colNames ) ) } + dataframeSolversIterations: pd.DataFrame = pd.DataFrame( data ) return dataframeSolversIterations diff --git a/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderFlow.py b/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderFlow.py index 544febac..b84d709c 100644 --- a/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderFlow.py +++ b/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderFlow.py @@ -12,11 +12,12 @@ class GeosLogReaderFlow: + def __init__( self: Self, filepath: str, - propertiesUnit: dict[str, Unit], - phaseNames: Union[list[str], None] = None, + propertiesUnit: dict[ str, Unit ], + phaseNames: Union[ list[ str ], None ] = None, ) -> None: """A reader that reads .txt and .out files containing Geos logs. 
@@ -48,26 +49,24 @@ def __init__( Defaults to []. """ self.m_propertiesUnit = propertiesUnit - self.m_regionNames: list[str] = [] - numberPhases: int = fcts.findNumberPhasesSimulation(filepath) + self.m_regionNames: list[ str ] = [] + numberPhases: int = fcts.findNumberPhasesSimulation( filepath ) if phaseNames is None: phaseNames = [] - self.m_phaseNames: list[str] = fcts.phaseNamesBuilder(numberPhases, phaseNames) + self.m_phaseNames: list[ str ] = fcts.phaseNamesBuilder( numberPhases, phaseNames ) self.m_computeStatisticsName: str = "" - self.m_regionsPropertiesValues: dict[str, list[float]] = {} - self.m_timesteps: list[float] = [] - - toFindInLog: list[str] = ["Adding Object CellElementRegion", "Time: 0"] - if not fcts.elementsAreInLog(filepath, toFindInLog): - print( - "Invalid Geos log file. Please check that your log" - + " did not crash and contains statistics on flow properties." - ) + self.m_regionsPropertiesValues: dict[ str, list[ float ] ] = {} + self.m_timesteps: list[ float ] = [] + + toFindInLog: list[ str ] = [ "Adding Object CellElementRegion", "Time: 0" ] + if not fcts.elementsAreInLog( filepath, toFindInLog ): + print( "Invalid Geos log file. Please check that your log" + + " did not crash and contains statistics on flow properties." ) else: - self.readAll(filepath) + self.readAll( filepath ) - def readRegionNames(self: Self, file: TextIOBase) -> int: + def readRegionNames( self: Self, file: TextIOBase ) -> int: """Initialize the m_regionNames attribute by reading log file. 
Args: @@ -77,23 +76,21 @@ def readRegionNames(self: Self, file: TextIOBase) -> int: int: The id of the last line read that contained the tag "Adding Object CellElementRegion" """ - regionsName: list[str] = [] + regionsName: list[ str ] = [] line: str = file.readline() id_line: int = 1 while "Adding Object CellElementRegion" not in line: line = file.readline() id_line += 1 while "Adding Object CellElementRegion" in line: - regionName: str = fcts.extractRegion(line) - regionsName.append(regionName) + regionName: str = fcts.extractRegion( line ) + regionsName.append( regionName ) line = file.readline() id_line += 1 self.m_regionNames = regionsName return id_line - def readComputeStatisticsName( - self: Self, file: TextIOBase, id_line: int, total_lines: int - ) -> tuple[int, str]: + def readComputeStatisticsName( self: Self, file: TextIOBase, id_line: int, total_lines: int ) -> tuple[ int, str ]: """Read flow statistics from the Geos log file. Args: @@ -108,7 +105,7 @@ def readComputeStatisticsName( computeStatisticsName: str = "" line: str = file.readline() id_line += 1 - while not line.startswith("Time: 0"): + while not line.startswith( "Time: 0" ): line = file.readline() id_line += 1 keepReading: bool = True @@ -116,18 +113,16 @@ def readComputeStatisticsName( line = file.readline() id_line += 1 if id_line > total_lines: - raise ValueError("No statistics name found in the log") + raise ValueError( "No statistics name found in the log" ) for regionName in self.m_regionNames: if regionName in line: - computeStatisticsName = fcts.extractStatsName(line) + computeStatisticsName = fcts.extractStatsName( line ) keepReading = False break self.m_computeStatisticsName = computeStatisticsName - return (id_line, line) + return ( id_line, line ) - def readPropertiesValues( - self: Self, file: TextIOBase, id_line: int, total_lines: int, lineTagStats: str - ) -> None: + def readPropertiesValues( self: Self, file: TextIOBase, id_line: int, total_lines: int, lineTagStats: str ) -> 
None: """Read property values from Geos log file. Initialize the m_regionsPropertiesValues and m_timesteps attributes @@ -142,83 +137,71 @@ def readPropertiesValues( lineTagStats (str): The first line containing the tag of the flow statistics model. """ - regionPropertiesValues: dict[str, list[float]] = {} + regionPropertiesValues: dict[ str, list[ float ] ] = {} newTimestep: float = 0.0 - timesteps: list[float] = [newTimestep] + timesteps: list[ float ] = [ newTimestep ] line: str = lineTagStats while id_line <= total_lines: - if line.startswith("Time:"): - newTimestep, dt = fcts.extractTimeAndDt(line) - newTimestep = fcts.convertValues( - ["Time"], [newTimestep], self.m_propertiesUnit - )[0] + if line.startswith( "Time:" ): + newTimestep, dt = fcts.extractTimeAndDt( line ) + newTimestep = fcts.convertValues( [ "Time" ], [ newTimestep ], self.m_propertiesUnit )[ 0 ] if self.m_computeStatisticsName in line and "CFL" not in line: - if newTimestep not in timesteps and newTimestep > max( - timesteps, default=0.0 - ): - timesteps.append(newTimestep) + if newTimestep not in timesteps and newTimestep > max( timesteps, default=0.0 ): + timesteps.append( newTimestep ) for key in regionPropertiesValues: - regionPropertiesValues[key].append(0.0) - propsName: list[str] = fcts.extractPropertiesFlow( - line, self.m_phaseNames - ) - propsNameId: list[str] = fcts.identifyProperties(propsName) + regionPropertiesValues[ key ].append( 0.0 ) + propsName: list[ str ] = fcts.extractPropertiesFlow( line, self.m_phaseNames ) + propsNameId: list[ str ] = fcts.identifyProperties( propsName ) for propNameId in propsNameId: if propNameId not in regionPropertiesValues: - regionPropertiesValues[propNameId] = [0.0] - propsValue: list[float] = fcts.extractValuesFlow(line) - valuesConverted: list[float] = fcts.convertValues( - propsName, propsValue, self.m_propertiesUnit - ) - for i, name in enumerate(propsNameId): - regionPropertiesValues[name][-1] = valuesConverted[i] + regionPropertiesValues[ 
propNameId ] = [ 0.0 ] + propsValue: list[ float ] = fcts.extractValuesFlow( line ) + valuesConverted: list[ float ] = fcts.convertValues( propsName, propsValue, self.m_propertiesUnit ) + for i, name in enumerate( propsNameId ): + regionPropertiesValues[ name ][ -1 ] = valuesConverted[ i ] line = file.readline() id_line += 1 self.m_regionsPropertiesValues = regionPropertiesValues self.m_timesteps = timesteps - def readAll(self: Self, filepath: str) -> None: + def readAll( self: Self, filepath: str ) -> None: """Initialize all the attributes of the class by reading a Geos log file. Args: filepath (str): Geos log filepath. """ - with open(filepath) as geosFile: - total_lines: int = fcts.countNumberLines(filepath) - id_line: int = self.readRegionNames(geosFile) - id_line, lineTag = self.readComputeStatisticsName( - geosFile, id_line, total_lines - ) - self.readPropertiesValues(geosFile, id_line, total_lines, lineTag) - - def createDataframe(self: Self) -> pd.DataFrame: + with open( filepath ) as geosFile: + total_lines: int = fcts.countNumberLines( filepath ) + id_line: int = self.readRegionNames( geosFile ) + id_line, lineTag = self.readComputeStatisticsName( geosFile, id_line, total_lines ) + self.readPropertiesValues( geosFile, id_line, total_lines, lineTag ) + + def createDataframe( self: Self ) -> pd.DataFrame: """Create and fill and return dataframeFlow. Returns: pd.DataFrame: dataframe with values from Geos log. 
""" try: - colNames: list[str] = [] - colValues: list[float] = [] + colNames: list[ str ] = [] + colValues: list[ float ] = [] for propName, values in self.m_regionsPropertiesValues.items(): - unitObj: Unit = self.m_propertiesUnit["nounit"] + unitObj: Unit = self.m_propertiesUnit[ "nounit" ] for propertyType in self.m_propertiesUnit: if propertyType in propName.lower(): - unitObj = self.m_propertiesUnit[propertyType] + unitObj = self.m_propertiesUnit[ propertyType ] break if unitObj.unitLabel == "": - raise ValueError( - "No unit was found for this property name <<" + propName + ">>." - ) + raise ValueError( "No unit was found for this property name <<" + propName + ">>." ) columnName: str = propName + "__" + unitObj.unitLabel - colNames.append(columnName) - colValues.append(values) # type: ignore[arg-type] - timeUnit: str = self.m_propertiesUnit["time"].unitLabel + colNames.append( columnName ) + colValues.append( values ) # type: ignore[arg-type] + timeUnit: str = self.m_propertiesUnit[ "time" ].unitLabel timeName: str = "Time__" + timeUnit - colNames.append(timeName) - colValues.append(self.m_timesteps) # type: ignore[arg-type] - data = {colNames[i]: colValues[i] for i in range(len(colNames))} - dataframeFlow: pd.DataFrame = pd.DataFrame(data) + colNames.append( timeName ) + colValues.append( self.m_timesteps ) # type: ignore[arg-type] + data = { colNames[ i ]: colValues[ i ] for i in range( len( colNames ) ) } + dataframeFlow: pd.DataFrame = pd.DataFrame( data ) return dataframeFlow except ValueError as err: - print(err.args[0]) + print( err.args[ 0 ] ) diff --git a/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderWells.py b/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderWells.py index e028e15f..eead1c95 100644 --- a/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderWells.py +++ b/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderWells.py @@ -12,11 +12,12 @@ class GeosLogReaderWells: + def __init__( self: Self, filepath: str, - propertiesUnit: 
dict[str, Unit], - phaseNames: Union[list[str], None] = None, + propertiesUnit: dict[ str, Unit ], + phaseNames: Union[ list[ str ], None ] = None, numberWellsForMean: int = 1, ) -> None: """Read for Wells from Geos log file. @@ -47,39 +48,36 @@ def __init__( Defaults to None. numberWellsForMean (int, optional): Number of wells. Defaults to 1. """ - self.m_propertiesUnit: dict[str, Unit] = propertiesUnit + self.m_propertiesUnit: dict[ str, Unit ] = propertiesUnit self.m_numberWellsForMean: int = numberWellsForMean - self.m_wellNames: list[str] = [] - numberPhases: int = fcts.findNumberPhasesSimulation(filepath) + self.m_wellNames: list[ str ] = [] + numberPhases: int = fcts.findNumberPhasesSimulation( filepath ) if phaseNames is None: phaseNames = [] - self.m_phaseNames: list[str] = fcts.phaseNamesBuilder(numberPhases, phaseNames) - self.m_wellsPropertiesValues: dict[str, list[float]] = {} - self.m_timesteps: list[float] = [] + self.m_phaseNames: list[ str ] = fcts.phaseNamesBuilder( numberPhases, phaseNames ) + self.m_wellsPropertiesValues: dict[ str, list[ float ] ] = {} + self.m_timesteps: list[ float ] = [] - toFindInLog1: list[str] = [ + toFindInLog1: list[ str ] = [ "_ConstantBHP_table", "Time: 0", " TableFunction: ", ] - toFindInLog2: list[str] = [ + toFindInLog2: list[ str ] = [ "_ConstantPhaseRate_table", "Time: 0", " TableFunction: ", ] - foundInLog1: bool = fcts.elementsAreInLog(filepath, toFindInLog1) - foundInLog2: bool = fcts.elementsAreInLog(filepath, toFindInLog2) + foundInLog1: bool = fcts.elementsAreInLog( filepath, toFindInLog1 ) + foundInLog2: bool = fcts.elementsAreInLog( filepath, toFindInLog2 ) if not foundInLog1 or not foundInLog2: - print( - "Invalid Geos log file. Please check that your log" - + " did not crash and contains wells." - ) + print( "Invalid Geos log file. Please check that your log" + " did not crash and contains wells." 
) else: - self.readAll(filepath) + self.readAll( filepath ) self.calculateMeanValues() - def readWellNames(self: Self, file: TextIOBase) -> int: + def readWellNames( self: Self, file: TextIOBase ) -> int: """Read well names from Geos log file. Args: @@ -90,7 +88,7 @@ def readWellNames(self: Self, file: TextIOBase) -> int: int: The id of the last line read that contains the tag "Adding Object WellElementRegion". """ - wellsName: list[str] = [] + wellsName: list[ str ] = [] line: str = file.readline() id_line: int = 1 intoWellNames: bool = False @@ -102,9 +100,9 @@ def readWellNames(self: Self, file: TextIOBase) -> int: intoTableFunctions: bool = True while intoTableFunctions: if "_ConstantBHP_table" in line or "_ConstantPhaseRate_table" in line: - wellName: str = fcts.extractWell(line) + wellName: str = fcts.extractWell( line ) if wellName not in wellsName: - wellsName.append(wellName) + wellsName.append( wellName ) line = file.readline() id_line += 1 if " TableFunction: " not in line: @@ -112,28 +110,26 @@ def readWellNames(self: Self, file: TextIOBase) -> int: self.m_wellNames = wellsName return id_line - def initWellPropertiesValues(self: Self) -> None: + def initWellPropertiesValues( self: Self ) -> None: """Initialize the m_wellPropertiesValues.""" - props: dict[str, list[float]] = {} + props: dict[ str, list[ float ] ] = {} for name in self.m_wellNames: - wName: str = fcts.formatPropertyName(name) + wName: str = fcts.formatPropertyName( name ) bhp: str = wName + "__BHP" totalMassRate: str = wName + "__TotalMassRate" totalSVR: str = wName + "__TotalSurfaceVolumetricRate" - propsNoId: list[str] = [bhp, totalMassRate, totalSVR] - if len(self.m_phaseNames) > 1: + propsNoId: list[ str ] = [ bhp, totalMassRate, totalSVR ] + if len( self.m_phaseNames ) > 1: for phase in self.m_phaseNames: - pName: str = fcts.formatPropertyName(phase) + pName: str = fcts.formatPropertyName( phase ) phaseSVR: str = wName + "__SurfaceVolumetricRate" + pName - propsNoId.append(phaseSVR) 
- propsWithId = fcts.identifyProperties(propsNoId) + propsNoId.append( phaseSVR ) + propsWithId = fcts.identifyProperties( propsNoId ) for propName in propsWithId: - props[propName] = [0.0] + props[ propName ] = [ 0.0 ] self.m_wellsPropertiesValues = props - def readPropertiesValues( - self: Self, file: TextIOBase, id_line: int, total_lines: int - ) -> None: + def readPropertiesValues( self: Self, file: TextIOBase, id_line: int, total_lines: int ) -> None: """Read property values from Geos log file. Initialize the m_regionsPropertiesValues and m_timesteps attributes @@ -148,69 +144,55 @@ def readPropertiesValues( """ line: str = file.readline() id_line += 1 - while not line.startswith("Time: 0"): + while not line.startswith( "Time: 0" ): line = file.readline() id_line += 1 - wellsPropertiesValues: dict[str, list[float]] = self.m_wellsPropertiesValues - currentWellName: str = self.m_wellNames[0] - currentPhaseName: str = self.m_phaseNames[0] + wellsPropertiesValues: dict[ str, list[ float ] ] = self.m_wellsPropertiesValues + currentWellName: str = self.m_wellNames[ 0 ] + currentPhaseName: str = self.m_phaseNames[ 0 ] newTimestep: float = 0.0 - timesteps: list[float] = [newTimestep] + timesteps: list[ float ] = [ newTimestep ] while id_line <= total_lines: - wellTags = fcts.extractWellTags(line) - if line.startswith("Time:"): - newTimestep, dt = fcts.extractTimeAndDt(line) - newTimestep = fcts.convertValues( - ["Time"], [newTimestep], self.m_propertiesUnit - )[0] + wellTags = fcts.extractWellTags( line ) + if line.startswith( "Time:" ): + newTimestep, dt = fcts.extractTimeAndDt( line ) + newTimestep = fcts.convertValues( [ "Time" ], [ newTimestep ], self.m_propertiesUnit )[ 0 ] # If at least one well tag is found, this is a well line - if len(wellTags) > 0: - if newTimestep not in timesteps and newTimestep > max( - timesteps, default=0.0 - ): - timesteps.append(newTimestep) + if len( wellTags ) > 0: + if newTimestep not in timesteps and newTimestep > max( timesteps, 
default=0.0 ): + timesteps.append( newTimestep ) for key in wellsPropertiesValues: - wellsPropertiesValues[key].append(0.0) - newWellName: str = fcts.identifyCurrentWell(line, currentWellName) + wellsPropertiesValues[ key ].append( 0.0 ) + newWellName: str = fcts.identifyCurrentWell( line, currentWellName ) if newWellName != currentWellName: if newWellName in self.m_wellNames: currentWellName = newWellName else: - print( - f"Invalid well name <<{newWellName}>> found" - + f" at timestep <<{str(newTimestep)}>>" - + f" in line :\n<<{line}>>.\nAnother correct well" - + f" name <<{currentWellName}>> was used to" - + " correct this.\nExpected well names are :" - + f" {str(self.m_wellNames)}.\n" - ) - if ("phase" in line.lower()) and ("phase surface" not in line.lower()): - newPhaseId: int = fcts.extractPhaseId(line) - if self.m_phaseNames[newPhaseId] != currentWellName: - currentPhaseName = self.m_phaseNames[newPhaseId] - propsName: list[str] = fcts.extractPropertiesWell( - line, currentWellName, currentPhaseName - ) + print( f"Invalid well name <<{newWellName}>> found" + f" at timestep <<{str(newTimestep)}>>" + + f" in line :\n<<{line}>>.\nAnother correct well" + + f" name <<{currentWellName}>> was used to" + + " correct this.\nExpected well names are :" + f" {str(self.m_wellNames)}.\n" ) + if ( "phase" in line.lower() ) and ( "phase surface" not in line.lower() ): + newPhaseId: int = fcts.extractPhaseId( line ) + if self.m_phaseNames[ newPhaseId ] != currentWellName: + currentPhaseName = self.m_phaseNames[ newPhaseId ] + propsName: list[ str ] = fcts.extractPropertiesWell( line, currentWellName, currentPhaseName ) for name in propsName: if "density" in name.lower(): - propsName.pop(propsName.index(name)) - if len(propsName) > 0 and "IsShut" not in propsName[0]: - propsNameId: list[str] = fcts.identifyProperties(propsName) - propsValue: list[float] = fcts.extractValuesWell( - line, len(propsName) - ) - valuesConverted: list[float] = fcts.convertValues( - propsName, 
propsValue, self.m_propertiesUnit - ) - for i, name in enumerate(propsNameId): - wellsPropertiesValues[name][-1] = valuesConverted[i] + propsName.pop( propsName.index( name ) ) + if len( propsName ) > 0 and "IsShut" not in propsName[ 0 ]: + propsNameId: list[ str ] = fcts.identifyProperties( propsName ) + propsValue: list[ float ] = fcts.extractValuesWell( line, len( propsName ) ) + valuesConverted: list[ float ] = fcts.convertValues( propsName, propsValue, self.m_propertiesUnit ) + for i, name in enumerate( propsNameId ): + wellsPropertiesValues[ name ][ -1 ] = valuesConverted[ i ] line = file.readline() id_line += 1 self.m_wellsPropertiesValues = wellsPropertiesValues self.m_timesteps = timesteps - def readAll(self: Self, filepath: str) -> None: + def readAll( self: Self, filepath: str ) -> None: """Initialize all the attributes of the class by reading a Geos log file. Args: @@ -218,76 +200,70 @@ def readAll(self: Self, filepath: str) -> None: singlephase (bool): True if its a singlephase simulation, False if multiphase. 
""" - with open(filepath) as geosFile: - total_lines: int = fcts.countNumberLines(filepath) - id_line = self.readWellNames(geosFile) + with open( filepath ) as geosFile: + total_lines: int = fcts.countNumberLines( filepath ) + id_line = self.readWellNames( geosFile ) self.initWellPropertiesValues() - self.readPropertiesValues(geosFile, id_line, total_lines) + self.readPropertiesValues( geosFile, id_line, total_lines ) - def calculateMeanValues(self: Self) -> None: + def calculateMeanValues( self: Self ) -> None: """Calculate mean values of all wells.""" nbr: int = self.m_numberWellsForMean - wNames: list[str] = self.m_wellNames - pNames: list[str] = self.m_phaseNames - wpv: dict[str, list[float]] = self.m_wellsPropertiesValues - cNames: list[str] = list(wpv.keys()) - bhpNames: list[str] = [n for n in cNames if "bhp" in n.lower()] - totalMassRateNames: list[str] = [ - n for n in cNames if "totalmassrate" in n.lower() - ] - totalSVRNames: list[str] = [ - n for n in cNames if "totalsurfacevolumetricrate" in n.lower() - ] - differentMeanColumns: dict[str, list[str]] = { + wNames: list[ str ] = self.m_wellNames + pNames: list[ str ] = self.m_phaseNames + wpv: dict[ str, list[ float ] ] = self.m_wellsPropertiesValues + cNames: list[ str ] = list( wpv.keys() ) + bhpNames: list[ str ] = [ n for n in cNames if "bhp" in n.lower() ] + totalMassRateNames: list[ str ] = [ n for n in cNames if "totalmassrate" in n.lower() ] + totalSVRNames: list[ str ] = [ n for n in cNames if "totalsurfacevolumetricrate" in n.lower() ] + differentMeanColumns: dict[ str, list[ str ] ] = { "MeanBHP": bhpNames, "MeanTotalMassRate": totalMassRateNames, "MeanTotalVolumetricRate": totalSVRNames, } for pName in pNames: - pName = fcts.formatPropertyName(pName) + pName = fcts.formatPropertyName( pName ) meanName: str = "MeanSurfaceVolumetricRate" + pName - differentMeanColumns[meanName] = [] + differentMeanColumns[ meanName ] = [] for wName in wNames: - wName = fcts.formatPropertyName(wName) + wName = 
fcts.formatPropertyName( wName ) n: str = wName + "__SurfaceVolumetricRate" + pName - n = fcts.identifyProperties([n])[0] + n = fcts.identifyProperties( [ n ] )[ 0 ] if n in cNames: - differentMeanColumns[meanName].append(n) + differentMeanColumns[ meanName ].append( n ) for meanName, columns in differentMeanColumns.items(): - if len(columns) > 0: - values: list[list[float]] = [wpv[c] for c in columns] - meanValues: list[float] = [sum(item) / nbr for item in zip(*values)] - meanNameWithId: str = fcts.identifyProperties([meanName])[0] - self.m_wellsPropertiesValues[meanNameWithId] = meanValues + if len( columns ) > 0: + values: list[ list[ float ] ] = [ wpv[ c ] for c in columns ] + meanValues: list[ float ] = [ sum( item ) / nbr for item in zip( *values ) ] + meanNameWithId: str = fcts.identifyProperties( [ meanName ] )[ 0 ] + self.m_wellsPropertiesValues[ meanNameWithId ] = meanValues - def createDataframe(self: Self) -> pd.DataFrame: + def createDataframe( self: Self ) -> pd.DataFrame: """Create and fill and return dataframeWells. Return: pd.DataFrame: dataframe with log values. """ - colNames: list[str] = [] - colValues: list[float] = [] + colNames: list[ str ] = [] + colValues: list[ float ] = [] try: for propName, values in self.m_wellsPropertiesValues.items(): - unitObj: Unit = self.m_propertiesUnit["nounit"] + unitObj: Unit = self.m_propertiesUnit[ "nounit" ] for propertyType in self.m_propertiesUnit: if propertyType.lower() in propName.lower(): - unitObj = self.m_propertiesUnit[propertyType] + unitObj = self.m_propertiesUnit[ propertyType ] break if unitObj.unitLabel == "": - raise ValueError( - "No unit was found for this property name <<" + propName + ">>." - ) + raise ValueError( "No unit was found for this property name <<" + propName + ">>." 
) columnName: str = propName + "__" + unitObj.unitLabel - colNames.append(columnName) - colValues.append(values) # type: ignore[arg-type] + colNames.append( columnName ) + colValues.append( values ) # type: ignore[arg-type] except ValueError as err: - print(err.args[0]) - timeUnit: str = self.m_propertiesUnit["time"].unitLabel + print( err.args[ 0 ] ) + timeUnit: str = self.m_propertiesUnit[ "time" ].unitLabel timeName: str = "Time__" + timeUnit - colNames.append(timeName) - colValues.append(self.m_timesteps) # type: ignore[arg-type] - data = {colNames[i]: colValues[i] for i in range(len(colNames))} - dataframeWells: pd.DataFrame = pd.DataFrame(data) + colNames.append( timeName ) + colValues.append( self.m_timesteps ) # type: ignore[arg-type] + data = { colNames[ i ]: colValues[ i ] for i in range( len( colNames ) ) } + dataframeWells: pd.DataFrame = pd.DataFrame( data ) return dataframeWells diff --git a/geos-pv/src/geos_pv/geosLogReaderUtils/geosLogReaderFunctions.py b/geos-pv/src/geos_pv/geosLogReaderUtils/geosLogReaderFunctions.py index 4b4a0b70..36a2c531 100644 --- a/geos-pv/src/geos_pv/geosLogReaderUtils/geosLogReaderFunctions.py +++ b/geos-pv/src/geos_pv/geosLogReaderUtils/geosLogReaderFunctions.py @@ -12,7 +12,7 @@ __doc__ = """Functions to read and process Geos log.""" -def extractRegion(geosLogLine: str) -> str: +def extractRegion( geosLogLine: str ) -> str: """Extracts the name of the region from a Geos log line. 
Args: @@ -28,21 +28,17 @@ def extractRegion(geosLogLine: str) -> str: str: "Reservoir" """ try: - lineElements: list[str] = geosLogLine.split() - namedElementIndex: int = lineElements.index("named") - if len(lineElements) > namedElementIndex + 1: - return lineElements[namedElementIndex + 1] + lineElements: list[ str ] = geosLogLine.split() + namedElementIndex: int = lineElements.index( "named" ) + if len( lineElements ) > namedElementIndex + 1: + return lineElements[ namedElementIndex + 1 ] else: - raise ValueError( - "Not enough elements to unpack in region line <<" + geosLogLine + ">>" - ) + raise ValueError( "Not enough elements to unpack in region line <<" + geosLogLine + ">>" ) except Exception as e: - raise ValueError( - "An error has occured while parsing region line <<" + geosLogLine + ">>" - ) from e + raise ValueError( "An error has occured while parsing region line <<" + geosLogLine + ">>" ) from e -def extractWell(geosLogLine: str) -> str: +def extractWell( geosLogLine: str ) -> str: """Extracts the name of the well from a Geos log line. 
Args: @@ -58,27 +54,21 @@ def extractWell(geosLogLine: str) -> str: str: "wellControls" """ try: - lineElements: list[str] = geosLogLine.split(":") - wellName: str = lineElements[1].replace(" ", "") + lineElements: list[ str ] = geosLogLine.split( ":" ) + wellName: str = lineElements[ 1 ].replace( " ", "" ) indexEndName: int if "_ConstantBHP_table" in wellName: - indexEndName = wellName.index("_ConstantBHP_table") + indexEndName = wellName.index( "_ConstantBHP_table" ) elif "_ConstantPhaseRate_table" in wellName: - indexEndName = wellName.index("_ConstantPhaseRate_table") + indexEndName = wellName.index( "_ConstantPhaseRate_table" ) else: - raise ValueError( - "The expected format was not found when parsing line <<" - + geosLogLine - + ">>" - ) - return wellName[:indexEndName] + raise ValueError( "The expected format was not found when parsing line <<" + geosLogLine + ">>" ) + return wellName[ :indexEndName ] except Exception as e: - raise ValueError( - "An error has occured while parsing region line <<" + geosLogLine + ">>" - ) from e + raise ValueError( "An error has occured while parsing region line <<" + geosLogLine + ">>" ) from e -def extractAquifer(geosLogLine: str) -> str: +def extractAquifer( geosLogLine: str ) -> str: """Extracts the name of the aquifer from a Geos log line. 
Args: @@ -94,17 +84,15 @@ def extractAquifer(geosLogLine: str) -> str: str: "aquifer1" """ try: - lineElements: list[str] = geosLogLine.split(":") - aquiferName: str = lineElements[1].replace(" ", "") - indexEndName: int = aquiferName.index("_pressureInfluence_table") - return aquiferName[:indexEndName] + lineElements: list[ str ] = geosLogLine.split( ":" ) + aquiferName: str = lineElements[ 1 ].replace( " ", "" ) + indexEndName: int = aquiferName.index( "_pressureInfluence_table" ) + return aquiferName[ :indexEndName ] except Exception as e: - raise ValueError( - "An error has occured while parsing region line <<" + geosLogLine + ">>" - ) from e + raise ValueError( "An error has occured while parsing region line <<" + geosLogLine + ">>" ) from e -def extractStatsName(geosLogLine: str) -> str: +def extractStatsName( geosLogLine: str ) -> str: """Extracts the name of the computed statistics name from a Geos log line. Args: @@ -114,11 +102,11 @@ def extractStatsName(geosLogLine: str) -> str: Returns: str: "compflowStatistics" """ - lineElements: list[str] = geosLogLine.split(",") - return lineElements[0] + lineElements: list[ str ] = geosLogLine.split( "," ) + return lineElements[ 0 ] -def extractPhaseModel(geosLogLine: str) -> str: +def extractPhaseModel( geosLogLine: str ) -> str: """Extracts the name of a phase model from a Geos log line. 
Args: @@ -133,22 +121,20 @@ def extractPhaseModel(geosLogLine: str) -> str: str: "PhillipsBrineDensity" """ try: - cleanLine: str = replaceSpecialCharactersWithWhitespace(geosLogLine) - lineElements: list[str] = cleanLine.split() - phaseModels: list[str] = [elt for elt in lineElements if "phaseModel" in elt] - matchingPhaseModel: str = phaseModels[0] - phaseModelElementIndex: int = lineElements.index(matchingPhaseModel) - if len(lineElements) > phaseModelElementIndex + 1: - return lineElements[phaseModelElementIndex + 1] + cleanLine: str = replaceSpecialCharactersWithWhitespace( geosLogLine ) + lineElements: list[ str ] = cleanLine.split() + phaseModels: list[ str ] = [ elt for elt in lineElements if "phaseModel" in elt ] + matchingPhaseModel: str = phaseModels[ 0 ] + phaseModelElementIndex: int = lineElements.index( matchingPhaseModel ) + if len( lineElements ) > phaseModelElementIndex + 1: + return lineElements[ phaseModelElementIndex + 1 ] else: - raise ValueError("Not enough elements to unpack in <<" + geosLogLine + ">>") + raise ValueError( "Not enough elements to unpack in <<" + geosLogLine + ">>" ) except Exception as e: - raise ValueError( - "An error has occured while parsing <<" + geosLogLine + ">>" - ) from e + raise ValueError( "An error has occured while parsing <<" + geosLogLine + ">>" ) from e -def extractPropertiesFlow(geosLogLine: str, phasesName: list[str]) -> list[str]: +def extractPropertiesFlow( geosLogLine: str, phasesName: list[ str ] ) -> list[ str ]: """Extracts flow property from a Geos log line. 
Args: @@ -164,38 +150,27 @@ def extractPropertiesFlow(geosLogLine: str, phasesName: list[str]) -> list[str]: list[str]: ["Reservoir__DeltaPressureMin", "Reservoir__DeltaPressureMax"] """ try: - lineBlocks: list[str] = geosLogLine.split(":") - if len(lineBlocks) == 3: - propertyLineBlock: str = lineBlocks[1] - propertiesName: list[str] = buildPropertiesNameFromGeosProperties( - propertyLineBlock, phasesName - ) - statsBlock: str = lineBlocks[0] - statsElements: list[str] = statsBlock.split() - if len(statsElements) >= 2: - regionName: str = statsElements[1] - formattedRegion: str = formatPropertyName(regionName) - formattedProps = [formatPropertyName(prop) for prop in propertiesName] - propertiesFlow: list[str] = [ - formattedRegion + "__" + prop for prop in formattedProps - ] + lineBlocks: list[ str ] = geosLogLine.split( ":" ) + if len( lineBlocks ) == 3: + propertyLineBlock: str = lineBlocks[ 1 ] + propertiesName: list[ str ] = buildPropertiesNameFromGeosProperties( propertyLineBlock, phasesName ) + statsBlock: str = lineBlocks[ 0 ] + statsElements: list[ str ] = statsBlock.split() + if len( statsElements ) >= 2: + regionName: str = statsElements[ 1 ] + formattedRegion: str = formatPropertyName( regionName ) + formattedProps = [ formatPropertyName( prop ) for prop in propertiesName ] + propertiesFlow: list[ str ] = [ formattedRegion + "__" + prop for prop in formattedProps ] return propertiesFlow else: - raise ValueError( - "Incorrect number of blocks in line <<" - + geosLogLine - + ">> for it to find property name." - ) + raise ValueError( "Incorrect number of blocks in line <<" + geosLogLine + + ">> for it to find property name." 
) except Exception as e: - raise ValueError( - "An error has occured while parsing <<" + geosLogLine + ">>" - ) from e + raise ValueError( "An error has occured while parsing <<" + geosLogLine + ">>" ) from e return [] -def buildPropertiesNameFromGeosProperties( - geosProperties: str, phasesName: list[str] -) -> list[str]: +def buildPropertiesNameFromGeosProperties( geosProperties: str, phasesName: list[ str ] ) -> list[ str ]: """Extracts the property name and its extensions like min, max, average. Args: @@ -205,24 +180,24 @@ def buildPropertiesNameFromGeosProperties( Returns: list[str]: [" Delta pressure min", " Delta pressure max"] """ - separatedNameAndExtension: list[str] = geosProperties.split("(") - nameBlock: str = separatedNameAndExtension[0] - finalPropertiesName: list[str] = [] + separatedNameAndExtension: list[ str ] = geosProperties.split( "(" ) + nameBlock: str = separatedNameAndExtension[ 0 ] + finalPropertiesName: list[ str ] = [] if " phase " in geosProperties or " Phase " in geosProperties: - finalPropertiesName = buildPropertiesNameForPhases(nameBlock, phasesName) + finalPropertiesName = buildPropertiesNameForPhases( nameBlock, phasesName ) elif " component " in geosProperties or " Component " in geosProperties: - finalPropertiesName = buildPropertiesNameForComponents(phasesName) + finalPropertiesName = buildPropertiesNameForComponents( phasesName ) else: # means that extensions have been found - if len(separatedNameAndExtension) == 2: - extensions: str = separatedNameAndExtension[1] - finalPropertiesName = buildPropertiesNameNoPhases(nameBlock, extensions) + if len( separatedNameAndExtension ) == 2: + extensions: str = separatedNameAndExtension[ 1 ] + finalPropertiesName = buildPropertiesNameNoPhases( nameBlock, extensions ) else: - finalPropertiesName = buildPropertiesNameNoPhases(nameBlock) + finalPropertiesName = buildPropertiesNameNoPhases( nameBlock ) return finalPropertiesName -def buildPropertiesNameForPhases(nameBlock: str, phasesName: 
list[str]) -> list[str]: +def buildPropertiesNameForPhases( nameBlock: str, phasesName: list[ str ] ) -> list[ str ]: """Replace phase by phase names. Args: @@ -232,17 +207,17 @@ def buildPropertiesNameForPhases(nameBlock: str, phasesName: list[str]) -> list[ Returns: list[str]: ['Mobile CO2 mass', 'Mobile Water mass'] """ - propertiesName: list[str] = [] + propertiesName: list[ str ] = [] for phaseName in phasesName: if " phase " in nameBlock: - newName: str = nameBlock.replace("phase", phaseName) + newName: str = nameBlock.replace( "phase", phaseName ) else: - newName = nameBlock.replace("Phase", phaseName) - propertiesName.append(newName) + newName = nameBlock.replace( "Phase", phaseName ) + propertiesName.append( newName ) return propertiesName -def buildPropertiesNameForComponents(phasesName: list[str]) -> list[str]: +def buildPropertiesNameForComponents( phasesName: list[ str ] ) -> list[ str ]: """Builds the list of component property names from the list of phases name. Args: @@ -252,15 +227,15 @@ def buildPropertiesNameForComponents(phasesName: list[str]) -> list[str]: list: ['Dissolved mass CO2 in CO2','Dissolved mass Water in CO2', 'Dissolved mass CO2 in Water','Dissolved mass Water in Water'] """ - propertiesName: list[str] = [] - for i in range(len(phasesName)): - for j in range(len(phasesName)): + propertiesName: list[ str ] = [] + for i in range( len( phasesName ) ): + for j in range( len( phasesName ) ): newName: str = f"Dissolved mass {phasesName[j]} in {phasesName[i]}" - propertiesName.append(newName) + propertiesName.append( newName ) return propertiesName -def buildPropertiesNameNoPhases(nameBlock: str, extensions: str = "") -> list[str]: +def buildPropertiesNameNoPhases( nameBlock: str, extensions: str = "" ) -> list[ str ]: """From a name block and extensions, builds a list of properties name. 
Args: @@ -271,15 +246,15 @@ def buildPropertiesNameNoPhases(nameBlock: str, extensions: str = "") -> list[st list: [" Delta pressure min", " Delta pressure max"] """ if extensions != "" and "metric" not in extensions: - extensionsClean = replaceSpecialCharactersWithWhitespace(extensions) + extensionsClean = replaceSpecialCharactersWithWhitespace( extensions ) extensionsName = extensionsClean.split() - propertiesName = [nameBlock + " " + ext for ext in extensionsName] + propertiesName = [ nameBlock + " " + ext for ext in extensionsName ] else: - propertiesName = [nameBlock] + propertiesName = [ nameBlock ] return propertiesName -def identifyProperties(properties: list[str]) -> list[str]: +def identifyProperties( properties: list[ str ] ) -> list[ str ]: """Identify properties and add identifer. From a list of properties name, identifies each of them with a certain @@ -292,70 +267,68 @@ def identifyProperties(properties: list[str]) -> list[str]: Returns: list[tuple[str, int]]: [1:"CaprockPressureMax", 1:"CaprockPressureMin"] """ - idProps: list[str] = [] + idProps: list[ str ] = [] # the order of the first element of every tuple is mandatory - propertiesIdentifiers: list[tuple[str, str]] = [ - ("deltapressure", "0"), - ("pressure", "1"), - ("temperature", "2"), - ("totaldynamicporevolume", "3"), - ("dynamicporevolumes", "4"), - ("nontrapped", "5"), - ("trapped", "6"), - ("immobile", "7"), - ("mobile", "8"), - ("dissolved", "9"), - ("meanbhp", "15"), - ("meantotalmassrate", "16"), - ("meantotalvolumetricrate", "17"), - ("meansurfacevolumetricrate", "18"), - ("totalmassrate", "12"), - ("totalvolumetricrate", "13"), - ("totalsurfacevolumetricrate", "13"), - ("surfacevolumetricrate", "14"), - ("totalfluidmass", "36"), - ("cellfluidmass", "37"), - ("mass", "10"), - ("bhp", "11"), - ("cumulatedvolumetricrate", "19"), - ("cumulatedvolume", "20"), - ("volumetricrate", "21"), - ("volume", "22"), - ("newtoniter", "23"), - ("lineariter", "24"), - ("stress", "25"), - 
("displacement", "26"), - ("permeability", "27"), - ("porosity", "28"), - ("ratio", "29"), - ("fraction", "30"), - ("bulkmodulus", "31"), - ("shearmodulus", "32"), - ("oedometricmodulus", "33"), - ("points", "34"), - ("density", "35"), - ("time", "38"), - ("dt", "39"), + propertiesIdentifiers: list[ tuple[ str, str ] ] = [ + ( "deltapressure", "0" ), + ( "pressure", "1" ), + ( "temperature", "2" ), + ( "totaldynamicporevolume", "3" ), + ( "dynamicporevolumes", "4" ), + ( "nontrapped", "5" ), + ( "trapped", "6" ), + ( "immobile", "7" ), + ( "mobile", "8" ), + ( "dissolved", "9" ), + ( "meanbhp", "15" ), + ( "meantotalmassrate", "16" ), + ( "meantotalvolumetricrate", "17" ), + ( "meansurfacevolumetricrate", "18" ), + ( "totalmassrate", "12" ), + ( "totalvolumetricrate", "13" ), + ( "totalsurfacevolumetricrate", "13" ), + ( "surfacevolumetricrate", "14" ), + ( "totalfluidmass", "36" ), + ( "cellfluidmass", "37" ), + ( "mass", "10" ), + ( "bhp", "11" ), + ( "cumulatedvolumetricrate", "19" ), + ( "cumulatedvolume", "20" ), + ( "volumetricrate", "21" ), + ( "volume", "22" ), + ( "newtoniter", "23" ), + ( "lineariter", "24" ), + ( "stress", "25" ), + ( "displacement", "26" ), + ( "permeability", "27" ), + ( "porosity", "28" ), + ( "ratio", "29" ), + ( "fraction", "30" ), + ( "bulkmodulus", "31" ), + ( "shearmodulus", "32" ), + ( "oedometricmodulus", "33" ), + ( "points", "34" ), + ( "density", "35" ), + ( "time", "38" ), + ( "dt", "39" ), ] for prop in properties: identification: bool = False for propId in propertiesIdentifiers: - if propId[0] in prop.lower(): - idProps.append(propId[1] + ":" + prop) + if propId[ 0 ] in prop.lower(): + idProps.append( propId[ 1 ] + ":" + prop ) identification = True break if not identification: - raise ValueError( - f"The property <<{prop}>> could not be identified.\n" - + "Check that your list of meta properties is updated." 
- ) + raise ValueError( f"The property <<{prop}>> could not be identified.\n" + + "Check that your list of meta properties is updated." ) return idProps # TODO check if this function works when having more than 2 components -def extractValuesFlow(geosLogLine: str) -> list[float]: +def extractValuesFlow( geosLogLine: str ) -> list[ float ]: """Extract values from a Geos log line. Args: @@ -367,18 +340,18 @@ def extractValuesFlow(geosLogLine: str) -> list[float]: list[float]: list of values in the line. [0.0, 0.0, 0.0, -6.38235e+10] """ - lineElements: list[str] = geosLogLine.split(":") - valuesBlock: str = lineElements[-1] - valuesBlock = valuesBlock.replace(",", " ") - valuesFound: list[float] = extractFloatsFromString(valuesBlock) + lineElements: list[ str ] = geosLogLine.split( ":" ) + valuesBlock: str = lineElements[ -1 ] + valuesBlock = valuesBlock.replace( ",", " " ) + valuesFound: list[ float ] = extractFloatsFromString( valuesBlock ) return valuesFound def convertValues( - propertyNames: list[str], - propertyValues: list[float], - propertiesUnit: dict[str, Unit], -) -> list[float]: + propertyNames: list[ str ], + propertyValues: list[ float ], + propertiesUnit: dict[ str, Unit ], +) -> list[ float ]: """Convert properties to the desired units. Knowing two lists : 1) float numbers that are supposed to be in @@ -396,20 +369,20 @@ def convertValues( Returns: list[float]: list of converted values. 
""" - assert len(propertyNames) == len(propertyValues) - valuesConverted: list[float] = [] - for index, name in enumerate(propertyNames): - unitObj: Unit = propertiesUnit["nounit"] + assert len( propertyNames ) == len( propertyValues ) + valuesConverted: list[ float ] = [] + for index, name in enumerate( propertyNames ): + unitObj: Unit = propertiesUnit[ "nounit" ] for propertyType in propertiesUnit: if propertyType.lower() in name.lower(): - unitObj = propertiesUnit[propertyType] + unitObj = propertiesUnit[ propertyType ] break - valueConverted: float = convert(propertyValues[index], unitObj) - valuesConverted.append(valueConverted) + valueConverted: float = convert( propertyValues[ index ], unitObj ) + valuesConverted.append( valueConverted ) return valuesConverted -def identifyCurrentWell(geosLogLine: str, lastWellName: str) -> str: +def identifyCurrentWell( geosLogLine: str, lastWellName: str ) -> str: """Identify the current name of the well rom a Geos log line. Because properties values of wells can be output without specifying @@ -439,18 +412,18 @@ def identifyCurrentWell(geosLogLine: str, lastWellName: str) -> str: str: "wellControls" """ if ":" in geosLogLine: - lineElements: list[str] = geosLogLine.split(":") - if geosLogLine.startswith("Rank"): - wellName: str = lineElements[1] + lineElements: list[ str ] = geosLogLine.split( ":" ) + if geosLogLine.startswith( "Rank" ): + wellName: str = lineElements[ 1 ] else: - wellName = lineElements[0] + wellName = lineElements[ 0 ] else: wellName = lastWellName wellName = wellName.lstrip().rstrip() return wellName -def extractPropertiesWell(geosLogLine: str, wellName: str, phaseName: str) -> list[str]: +def extractPropertiesWell( geosLogLine: str, wellName: str, phaseName: str ) -> list[ str ]: """Extracts the well property presented from a Geos log line. 
Args: @@ -462,8 +435,8 @@ def extractPropertiesWell(geosLogLine: str, wellName: str, phaseName: str) -> li Returns: list[str]: ["Well1_SurfaceVolumetricRatePhase1"] """ - wName: str = formatPropertyName(wellName) - pName: str = formatPropertyName(phaseName) + wName: str = formatPropertyName( wellName ) + pName: str = formatPropertyName( phaseName ) tags_association = { "BHP": wName + "__BHP", "total massRate": wName + "__TotalMassRate", @@ -473,15 +446,15 @@ def extractPropertiesWell(geosLogLine: str, wellName: str, phaseName: str) -> li "density of phase": wName + "__DensityOf" + pName, "total fluid density": wName + "__TotalFluidDensity", } - tags_found: list[str] = extractWellTags(geosLogLine) - propertiesWell: list[str] = [] + tags_found: list[ str ] = extractWellTags( geosLogLine ) + propertiesWell: list[ str ] = [] for tag in tags_found: - correspondingName = tags_association[tag] - propertiesWell.append(correspondingName) + correspondingName = tags_association[ tag ] + propertiesWell.append( correspondingName ) return propertiesWell -def extractPhaseId(geosLogLine: str) -> int: +def extractPhaseId( geosLogLine: str ) -> int: """Extracts the phase number id from a Geos log line. 
Args: @@ -496,21 +469,17 @@ def extractPhaseId(geosLogLine: str) -> int: int: 0 """ try: - lineElements: list[str] = geosLogLine.lower().split() - phaseElementIndex: int = lineElements.index("phase") - if len(lineElements) > phaseElementIndex + 1: - return int(lineElements[phaseElementIndex + 1]) + lineElements: list[ str ] = geosLogLine.lower().split() + phaseElementIndex: int = lineElements.index( "phase" ) + if len( lineElements ) > phaseElementIndex + 1: + return int( lineElements[ phaseElementIndex + 1 ] ) else: - raise ValueError( - "Not enough elements to unpack in region line <<" + geosLogLine + ">>" - ) + raise ValueError( "Not enough elements to unpack in region line <<" + geosLogLine + ">>" ) except Exception as e: - raise ValueError( - "An error has occured while parsing region line <<" + geosLogLine + ">>" - ) from e + raise ValueError( "An error has occured while parsing region line <<" + geosLogLine + ">>" ) from e -def extractWellTags(geosLogLine: str) -> list[str]: +def extractWellTags( geosLogLine: str ) -> list[ str ]: """Extracts the list of well property tags available from a Geos log line. Args: @@ -519,28 +488,28 @@ def extractWellTags(geosLogLine: str) -> list[str]: Returns: list[str]: list of tags. 
""" - if geosLogLine.startswith("Control switch"): + if geosLogLine.startswith( "Control switch" ): return [] lower_geosLogLine = geosLogLine.lower() - tags_found_line: list[str] = [] + tags_found_line: list[ str ] = [] if "well is shut" in lower_geosLogLine: - tags_found_line.append("well is shut") + tags_found_line.append( "well is shut" ) elif " bhp " in lower_geosLogLine: - tags_found_line.append("BHP") + tags_found_line.append( "BHP" ) elif "total rate" in lower_geosLogLine: - tags_found_line.append("total massRate") + tags_found_line.append( "total massRate" ) if "total surface volumetric rate" in lower_geosLogLine: - tags_found_line.append("total surface volumetricRate") + tags_found_line.append( "total surface volumetricRate" ) elif "surface volumetric rate" in lower_geosLogLine: - tags_found_line.append("phase surface volumetricRate") + tags_found_line.append( "phase surface volumetricRate" ) elif "density of phase" in lower_geosLogLine: - tags_found_line.append("density of phase") + tags_found_line.append( "density of phase" ) elif "total fluid density" in lower_geosLogLine: - tags_found_line.append("total fluid density") + tags_found_line.append( "total fluid density" ) return tags_found_line -def extractValuesWell(geosLogLine: str, numberProperties: int) -> list[float]: +def extractValuesWell( geosLogLine: str, numberProperties: int ) -> list[ float ]: """Extract values from Geos log line and returns them as a list of floats. The idea here is first to extract all floats values from the line. 
@@ -559,30 +528,20 @@ def extractValuesWell(geosLogLine: str, numberProperties: int) -> list[float]: """ try: if numberProperties > 0: - valuesFound: list[float] = extractFloatsFromString(geosLogLine) - if len(valuesFound) >= numberProperties: - usefulValues: list[float] = valuesFound[-numberProperties:] + valuesFound: list[ float ] = extractFloatsFromString( geosLogLine ) + if len( valuesFound ) >= numberProperties: + usefulValues: list[ float ] = valuesFound[ -numberProperties: ] return usefulValues else: - raise ValueError( - "Number of floats found in line is inferior to number of well properties" - + " in line <<" - + geosLogLine - + ">>." - ) + raise ValueError( "Number of floats found in line is inferior to number of well properties" + + " in line <<" + geosLogLine + ">>." ) else: - raise ValueError( - "No well property found in the well property line <<" - + geosLogLine - + ">>." - ) + raise ValueError( "No well property found in the well property line <<" + geosLogLine + ">>." ) except Exception as e: - raise ValueError( - "Well line not corresponding to expected layering <<" + geosLogLine + ">>." - ) from e + raise ValueError( "Well line not corresponding to expected layering <<" + geosLogLine + ">>." ) from e -def extractValueAndNameAquifer(geosLogLine: str) -> tuple[str, float]: +def extractValueAndNameAquifer( geosLogLine: str ) -> tuple[ str, float ]: """Extract value and name of the aquifer contained in a Geos log line. Args: @@ -596,28 +555,21 @@ def extractValueAndNameAquifer(geosLogLine: str) -> tuple[str, float]: e.g. 
("aquifer1", -0.6181975187076816) """ try: - lineElements: list[str] = geosLogLine.split() - indexAquifName: int = lineElements.index("produces") - 1 - indexValue: int = lineElements.index("flux") + 2 - if 0 < indexAquifName < indexValue and indexValue < len(lineElements): - aquifName: str = lineElements[indexAquifName].replace("'", "") - value: float = float(lineElements[indexValue]) - return (aquifName, value) + lineElements: list[ str ] = geosLogLine.split() + indexAquifName: int = lineElements.index( "produces" ) - 1 + indexValue: int = lineElements.index( "flux" ) + 2 + if 0 < indexAquifName < indexValue and indexValue < len( lineElements ): + aquifName: str = lineElements[ indexAquifName ].replace( "'", "" ) + value: float = float( lineElements[ indexValue ] ) + return ( aquifName, value ) else: - raise ValueError( - "Aquifer name or aquifer property value is not given in the line <<" - + geosLogLine - + ">>." - ) + raise ValueError( "Aquifer name or aquifer property value is not given in the line <<" + geosLogLine + + ">>." ) except Exception as e: - raise ValueError( - "Aquifer line not corresponding to expected layering <<" - + geosLogLine - + ">>." - ) from e + raise ValueError( "Aquifer line not corresponding to expected layering <<" + geosLogLine + ">>." ) from e -def correctZeroValuesInListOfValues(values: list[float]) -> list[float]: +def correctZeroValuesInListOfValues( values: list[ float ] ) -> list[ float ]: """Replace orhphelin 0 values of input list. 
If 0 values are found in a list of values, either replace them with the @@ -630,18 +582,18 @@ def correctZeroValuesInListOfValues(values: list[float]) -> list[float]: Returns: list[float]: list of ints or floats """ - valuesCorrected: list[float] = deepcopy(values) - for i in range(1, len(values) - 1): - valueChecked: float = values[i] + valuesCorrected: list[ float ] = deepcopy( values ) + for i in range( 1, len( values ) - 1 ): + valueChecked: float = values[ i ] if valueChecked == 0: - valueBefore: float = values[i - 1] - valueAfter: float = values[i + 1] + valueBefore: float = values[ i - 1 ] + valueAfter: float = values[ i + 1 ] if valueBefore != 0 or valueAfter != 0: - valuesCorrected[i] = valueBefore + valuesCorrected[ i ] = valueBefore return valuesCorrected -def extractTimeAndDt(geosLogLine: str) -> tuple[float, float]: +def extractTimeAndDt( geosLogLine: str ) -> tuple[ float, float ]: """From a Geos log line, extracts the float values of Time and dt. Args: @@ -660,46 +612,43 @@ def extractTimeAndDt(geosLogLine: str) -> tuple[float, float]: Returns: tuple[float]: (time, dt) """ - timeCounter: dict[str, float] = {"years": 0, "days": 0, "hrs": 0, "min": 0, "s": 0} + timeCounter: dict[ str, float ] = { "years": 0, "days": 0, "hrs": 0, "min": 0, "s": 0 } timeTag: str = "Time:" try: - indexDT: int = geosLogLine.index("dt:") - cycleIndex: int = geosLogLine.index("Cycle:") + indexDT: int = geosLogLine.index( "dt:" ) + cycleIndex: int = geosLogLine.index( "Cycle:" ) except ValueError: - print( - "The log line does not have valid format :\n<<" - + geosLogLine.rstrip() - + ">>\nDefault value of 0.0 returned." - ) - return (0.0, 0.0) - timePart: str = geosLogLine[len(timeTag) : indexDT] + print( "The log line does not have valid format :\n<<" + geosLogLine.rstrip() + + ">>\nDefault value of 0.0 returned." 
) + return ( 0.0, 0.0 ) + timePart: str = geosLogLine[ len( timeTag ):indexDT ] # timePart = " {} years, {} days, {} hrs, {} min, {} s, " - timePart = timePart.replace(" ", "")[:-1] + timePart = timePart.replace( " ", "" )[ :-1 ] # timePart = "{}years,{}days,{}hrs,{}min,{}s" - timeElts: list[str] = timePart.split(",") + timeElts: list[ str ] = timePart.split( "," ) # timeElts = ["{}years", "{}days", "{}hrs", "{}min", "{}s"] for elt in timeElts: lastDigitIndex: int = 0 - for i, caracter in enumerate(elt): + for i, caracter in enumerate( elt ): if caracter.isdigit(): lastDigitIndex = i - timeValue: float = float(elt[: lastDigitIndex + 1]) - timeFactor: str = elt[lastDigitIndex + 1 :] + timeValue: float = float( elt[ :lastDigitIndex + 1 ] ) + timeFactor: str = elt[ lastDigitIndex + 1: ] try: - timeCounter[timeFactor] += float(timeValue) + timeCounter[ timeFactor ] += float( timeValue ) except KeyError: - print(f"Cannot add time values for tag=<<{timeFactor}>>") - totalTime: float = timeInSecond(timeCounter) + print( f"Cannot add time values for tag=<<{timeFactor}>>" ) + totalTime: float = timeInSecond( timeCounter ) - dtPart: str = geosLogLine[indexDT:cycleIndex] + dtPart: str = geosLogLine[ indexDT:cycleIndex ] # dtPart = "dt: {} s, " - dtPart = dtPart.replace(" ", "")[3:-2] + dtPart = dtPart.replace( " ", "" )[ 3:-2 ] # dtPart = "{}" - dt: float = float(dtPart) - return (totalTime, dt) + dt: float = float( dtPart ) + return ( totalTime, dt ) -def timeInSecond(timeCounter: dict[str, float]) -> float: +def timeInSecond( timeCounter: dict[ str, float ] ) -> float: """Calculates the time in s from a dict of different time quantities. Args: @@ -709,15 +658,15 @@ def timeInSecond(timeCounter: dict[str, float]) -> float: Returns: float: Sum in seconds of all time quantities. 
""" - yearsToSeconds: float = timeCounter["years"] * 365.25 * 86400 - daysToSeconds: float = timeCounter["days"] * 86400 - hrsToSeconds: float = timeCounter["hrs"] * 3600 - minsToSeconds: float = timeCounter["min"] * 60 - s: float = timeCounter["s"] + yearsToSeconds: float = timeCounter[ "years" ] * 365.25 * 86400 + daysToSeconds: float = timeCounter[ "days" ] * 86400 + hrsToSeconds: float = timeCounter[ "hrs" ] * 3600 + minsToSeconds: float = timeCounter[ "min" ] * 60 + s: float = timeCounter[ "s" ] return yearsToSeconds + daysToSeconds + hrsToSeconds + minsToSeconds + s -def extractNewtonIter(geosLogLine: str) -> int: +def extractNewtonIter( geosLogLine: str ) -> int: """From a Geos log line, extracts the int value of NewtonIter. Args: @@ -733,23 +682,19 @@ def extractNewtonIter(geosLogLine: str) -> int: int: NewtonIter """ try: - lineClean: str = replaceSpecialCharactersWithWhitespace(geosLogLine) - lineElements: list[str] = lineClean.split() - newtonIterIndex: int = lineElements.index("NewtonIter") - if len(lineElements) > newtonIterIndex + 1: - newtonIter: str = lineElements[newtonIterIndex + 1] - return int(newtonIter) + lineClean: str = replaceSpecialCharactersWithWhitespace( geosLogLine ) + lineElements: list[ str ] = lineClean.split() + newtonIterIndex: int = lineElements.index( "NewtonIter" ) + if len( lineElements ) > newtonIterIndex + 1: + newtonIter: str = lineElements[ newtonIterIndex + 1 ] + return int( newtonIter ) else: - raise ValueError( - "Not enough elements to unpack in line <<" + geosLogLine + ">>." - ) + raise ValueError( "Not enough elements to unpack in line <<" + geosLogLine + ">>." 
) except Exception as e: - raise ValueError( - "An error has occured while parsing <<" + geosLogLine + ">>" - ) from e + raise ValueError( "An error has occured while parsing <<" + geosLogLine + ">>" ) from e -def extractLinearIter(geosLogLine: str) -> int: +def extractLinearIter( geosLogLine: str ) -> int: """From a Geos log line, extracts the int value of linear iterations. Args: @@ -766,20 +711,16 @@ def extractLinearIter(geosLogLine: str) -> int: int: 23 """ try: - lineClean: str = replaceSpecialCharactersWithWhitespace(geosLogLine) - lineElements: list[str] = lineClean.split() - iterIndex: int = lineElements.index("Iterations") - if len(lineElements) > iterIndex + 1: - linearIter: str = lineElements[iterIndex + 1] - return int(linearIter) + lineClean: str = replaceSpecialCharactersWithWhitespace( geosLogLine ) + lineElements: list[ str ] = lineClean.split() + iterIndex: int = lineElements.index( "Iterations" ) + if len( lineElements ) > iterIndex + 1: + linearIter: str = lineElements[ iterIndex + 1 ] + return int( linearIter ) else: - raise ValueError( - "Not enough elements to unpack in line <<" + geosLogLine + ">>." - ) + raise ValueError( "Not enough elements to unpack in line <<" + geosLogLine + ">>." ) except Exception as e: - raise ValueError( - "An error has occured while parsing <<" + geosLogLine + ">>" - ) from e + raise ValueError( "An error has occured while parsing <<" + geosLogLine + ">>" ) from e """ @@ -787,7 +728,7 @@ def extractLinearIter(geosLogLine: str) -> int: """ -def replaceSpecialCharactersWithWhitespace(sentence: str) -> str: +def replaceSpecialCharactersWithWhitespace( sentence: str ) -> str: """Replace every special characters in a string with whitespaces. 
Args: @@ -796,11 +737,11 @@ def replaceSpecialCharactersWithWhitespace(sentence: str) -> str: Returns: str: "hi there " """ - cleanSentence: str = re.sub("[^a-zA-Z0-9\n.+]", " ", sentence) + cleanSentence: str = re.sub( "[^a-zA-Z0-9\n.+]", " ", sentence ) return cleanSentence -def formatPropertyName(propertyName: str) -> str: +def formatPropertyName( propertyName: str ) -> str: """Clean the string by replacing special characters and removing spaces. Args: @@ -809,18 +750,16 @@ def formatPropertyName(propertyName: str) -> str: Returns: str: NameOfTheProperty """ - propertyClean: str = replaceSpecialCharactersWithWhitespace(propertyName) - propertyElements: list[str] = propertyClean.split() - capitalizedPropertyElements: list[str] = [ - elt[0].upper() + elt[1:] for elt in propertyElements - ] + propertyClean: str = replaceSpecialCharactersWithWhitespace( propertyName ) + propertyElements: list[ str ] = propertyClean.split() + capitalizedPropertyElements: list[ str ] = [ elt[ 0 ].upper() + elt[ 1: ] for elt in propertyElements ] formattedName: str = "" for element in capitalizedPropertyElements: formattedName += element return formattedName -def extractFloatsFromString(line: str) -> list[float]: +def extractFloatsFromString( line: str ) -> list[ float ]: """Extracts a list of float numbers from a string. 
Args: @@ -829,20 +768,20 @@ def extractFloatsFromString(line: str) -> list[float]: Returns: list[float]: [float1, ..., floatN] """ - lineModified: str = deepcopy(line) - replacements: list[str] = ["[", "]", "{", "}"] + lineModified: str = deepcopy( line ) + replacements: list[ str ] = [ "[", "]", "{", "}" ] for replacement in replacements: - lineModified = lineModified.replace(replacement, " ") - elements: list[str] = lineModified.split() - floats: list[float] = [] + lineModified = lineModified.replace( replacement, " " ) + elements: list[ str ] = lineModified.split() + floats: list[ float ] = [] for elt in elements: - if isFloat(elt): - floats.append(float(elt)) + if isFloat( elt ): + floats.append( float( elt ) ) return floats # from https://stackoverflow.com/a/20929881 -def isFloat(element: Any) -> bool: # noqa: ANN401 # disable Any error +def isFloat( element: Any ) -> bool: # noqa: ANN401 # disable Any error """Check whether an element is float or not. Args: @@ -854,13 +793,13 @@ def isFloat(element: Any) -> bool: # noqa: ANN401 # disable Any error if element is None: return False try: - float(element) + float( element ) return True except ValueError: return False -def extractListIntsFromString(string: str) -> list[int]: +def extractListIntsFromString( string: str ) -> list[ int ]: """Builds a list of int numbers from a string. 
Args: @@ -869,16 +808,16 @@ def extractListIntsFromString(string: str) -> list[int]: Returns: list[int]: [int1, ..., intN] """ - intsFound: list[int] = [] - cleanString: str = replaceSpecialCharactersWithWhitespace(string) - lineElements: list[str] = cleanString.split() + intsFound: list[ int ] = [] + cleanString: str = replaceSpecialCharactersWithWhitespace( string ) + lineElements: list[ str ] = cleanString.split() for elt in lineElements: - with contextlib.suppress(ValueError): - intsFound.append(int(elt)) + with contextlib.suppress( ValueError ): + intsFound.append( int( elt ) ) return intsFound -def extractFirstIntFromString(string: str) -> int: +def extractFirstIntFromString( string: str ) -> int: """Extracts the first int value from a string. Args: @@ -887,18 +826,18 @@ def extractFirstIntFromString(string: str) -> int: Returns: int or None if no int was found. """ - cleanString: str = replaceSpecialCharactersWithWhitespace(string) - lineElements: list[str] = cleanString.split() + cleanString: str = replaceSpecialCharactersWithWhitespace( string ) + lineElements: list[ str ] = cleanString.split() for elt in lineElements: try: - intFound: int = int(elt) + intFound: int = int( elt ) return intFound except ValueError: pass - raise ValueError("Line does not contain int value.") + raise ValueError( "Line does not contain int value." ) -def countNumberLines(filepath: str) -> int: +def countNumberLines( filepath: str ) -> int: """Reads a file to find the number of lines within it. Args: @@ -907,12 +846,12 @@ def countNumberLines(filepath: str) -> int: Returns: int: Number of lines in file. """ - with open(filepath) as file: - numberLines = len(file.readlines()) + with open( filepath ) as file: + numberLines = len( file.readlines() ) return numberLines -def elementsAreInLog(filepath: str, elements: list[str]) -> bool: +def elementsAreInLog( filepath: str, elements: list[ str ] ) -> bool: """Indicates if input file contains element from input list of string. 
To do so, this reads a file and checks at every line if an @@ -928,20 +867,20 @@ def elementsAreInLog(filepath: str, elements: list[str]) -> bool: Returns: bool: """ - assert len(elements) > 0 - with open(filepath) as file: + assert len( elements ) > 0 + with open( filepath ) as file: for line in file: - if len(elements) == 0: + if len( elements ) == 0: return True for element in elements: if element in line: - indexElement: int = elements.index(element) - elements.pop(indexElement) + indexElement: int = elements.index( element ) + elements.pop( indexElement ) break return False -def findNumberPhasesSimulation(filepath: str) -> int: +def findNumberPhasesSimulation( filepath: str ) -> int: """Find the number of phases from Geos log file. Geos logs do not have explicit message telling you how many phases @@ -956,33 +895,28 @@ def findNumberPhasesSimulation(filepath: str) -> int: Returns: int: The number of phases found in the Geos log. """ - numberLines: int = countNumberLines(filepath) + numberLines: int = countNumberLines( filepath ) # arbitrary number of minimum lines to consider the log as readable assert numberLines > 50 - with open(filepath) as geosFile: + with open( filepath ) as geosFile: line: str = geosFile.readline() id_line: int = 1 - while not line.startswith("Time:") and id_line <= numberLines: + while not line.startswith( "Time:" ) and id_line <= numberLines: line = geosFile.readline() id_line += 1 - if line.startswith("Adding Solver of type") and ( - "singlephase" in line.lower() - ): + if line.startswith( "Adding Solver of type" ) and ( "singlephase" in line.lower() ): return 1 maxPhaseIdWell: int = -1 while id_line <= numberLines: line = geosFile.readline() id_line += 1 if "Phase mass" in line or "Phase dynamic" in line: - valuesFound: list[float] = extractValuesFlow(line) - return len(valuesFound) + valuesFound: list[ float ] = extractValuesFlow( line ) + return len( valuesFound ) lowLine: str = line.lower() - phaseTags: list[str] = [" phase ", " 
surface "] - if ( - all(tag in lowLine for tag in phaseTags) - and "phase surface" not in lowLine - ): - phaseIdWell: int = extractPhaseId(line) + phaseTags: list[ str ] = [ " phase ", " surface " ] + if ( all( tag in lowLine for tag in phaseTags ) and "phase surface" not in lowLine ): + phaseIdWell: int = extractPhaseId( line ) if maxPhaseIdWell < phaseIdWell: maxPhaseIdWell = phaseIdWell else: @@ -990,7 +924,7 @@ def findNumberPhasesSimulation(filepath: str) -> int: return 0 -def transformUserChoiceToListPhases(userChoice: Union[str, None]) -> list[str]: +def transformUserChoiceToListPhases( userChoice: Union[ str, None ] ) -> list[ str ]: """Get a list of phase name from the input string. When using GeosLogReader, the user can choose the names of the phases @@ -1005,24 +939,21 @@ def transformUserChoiceToListPhases(userChoice: Union[str, None]) -> list[str]: """ if userChoice is None: return [] - choice: str = deepcopy(userChoice) + choice: str = deepcopy( userChoice ) # Regular expression pattern to match any symbol that is not # alphanumeric, comma, or whitespace pattern = r"[^\w ,]" - matches = re.findall(pattern, userChoice) - if bool(matches): - print( - "You cannot use symbols except for commas." - + " Please separate your phase names with whitespace" - + " or with commas." - ) + matches = re.findall( pattern, userChoice ) + if bool( matches ): + print( "You cannot use symbols except for commas." + " Please separate your phase names with whitespace" + + " or with commas." ) return [] - choiceClean: str = choice.replace(",", " ") - phaseNames: list[str] = choiceClean.split() + choiceClean: str = choice.replace( ",", " " ) + phaseNames: list[ str ] = choiceClean.split() return phaseNames -def phaseNamesBuilder(numberPhases: int, phasesFromUser: list[str]) -> list[str]: +def phaseNamesBuilder( numberPhases: int, phasesFromUser: list[ str ] ) -> list[ str ]: """Build phase names. 
When creating phase names, the user can or cannot have defined his @@ -1038,11 +969,11 @@ def phaseNamesBuilder(numberPhases: int, phasesFromUser: list[str]) -> list[str] Returns: list[str]: [nameFromUser0, nameFromUser1, ..., phaseN-1, phaseN] """ - phaseNames: list[str] = [] - size: int = len(phasesFromUser) - for i in range(numberPhases): + phaseNames: list[ str ] = [] + size: int = len( phasesFromUser ) + for i in range( numberPhases ): if i + 1 > size: - phaseNames.append("phase" + str(i)) + phaseNames.append( "phase" + str( i ) ) else: - phaseNames.append(phasesFromUser[i]) + phaseNames.append( phasesFromUser[ i ] ) return phaseNames diff --git a/geos-pv/src/geos_pv/pyplotUtils/matplotlibOptions.py b/geos-pv/src/geos_pv/pyplotUtils/matplotlibOptions.py index a1db5c6b..311678ce 100644 --- a/geos-pv/src/geos_pv/pyplotUtils/matplotlibOptions.py +++ b/geos-pv/src/geos_pv/pyplotUtils/matplotlibOptions.py @@ -8,8 +8,9 @@ from typing_extensions import Self -class OptionSelectionEnum(Enum): - def __init__(self: Self, displayName: str, optionValue: str) -> None: +class OptionSelectionEnum( Enum ): + + def __init__( self: Self, displayName: str, optionValue: str ) -> None: """Define the enumeration to options for Paraview selectors. 
Args: @@ -22,53 +23,53 @@ def __init__(self: Self, displayName: str, optionValue: str) -> None: self.optionValue: str = optionValue -class LegendLocationEnum(OptionSelectionEnum): - BEST = ("best", "best") - UPPER_LEFT = ("upper left", "upper left") - UPPER_CENTER = ("upper center", "upper center") - UPPER_RIGHT = ("upper right", "upper right") - CENTER_LEFT = ("center left", "center left") - CENTER = ("center", "center") - CENTER_RIGHT = ("center right", "center right") - LOWER_LEFT = ("lower left", "lower left") - LOWER_CENTER = ("lower center", "lower center") - LOWER_RIGHT = ("lower right", "lower right") +class LegendLocationEnum( OptionSelectionEnum ): + BEST = ( "best", "best" ) + UPPER_LEFT = ( "upper left", "upper left" ) + UPPER_CENTER = ( "upper center", "upper center" ) + UPPER_RIGHT = ( "upper right", "upper right" ) + CENTER_LEFT = ( "center left", "center left" ) + CENTER = ( "center", "center" ) + CENTER_RIGHT = ( "center right", "center right" ) + LOWER_LEFT = ( "lower left", "lower left" ) + LOWER_CENTER = ( "lower center", "lower center" ) + LOWER_RIGHT = ( "lower right", "lower right" ) -class FontStyleEnum(OptionSelectionEnum): - NORMAL = ("normal", "normal") - ITALIC = ("italic", "italic") - OBLIQUE = ("oblique", "oblique") +class FontStyleEnum( OptionSelectionEnum ): + NORMAL = ( "normal", "normal" ) + ITALIC = ( "italic", "italic" ) + OBLIQUE = ( "oblique", "oblique" ) -class FontWeightEnum(OptionSelectionEnum): - NORMAL = ("normal", "normal") - BOLD = ("bold", "bold") - HEAVY = ("heavy", "heavy") - LIGHT = ("light", "light") +class FontWeightEnum( OptionSelectionEnum ): + NORMAL = ( "normal", "normal" ) + BOLD = ( "bold", "bold" ) + HEAVY = ( "heavy", "heavy" ) + LIGHT = ( "light", "light" ) -class LineStyleEnum(OptionSelectionEnum): - NONE = ("None", "None") - SOLID = ("solid", "-") - DASHED = ("dashed", "--") - DASHDOT = ("dashdot", "-.") - DOTTED = ("dotted", ":") +class LineStyleEnum( OptionSelectionEnum ): + NONE = ( "None", "None" ) + 
SOLID = ( "solid", "-" ) + DASHED = ( "dashed", "--" ) + DASHDOT = ( "dashdot", "-." ) + DOTTED = ( "dotted", ":" ) -class MarkerStyleEnum(OptionSelectionEnum): - NONE = ("None", "") - POINT = ("point", ".") - CIRCLE = ("circle", "o") - TRIANGLE = ("triangle", "^") - SQUARE = ("square", "s") - STAR = ("star", "*") - DIAMOND = ("diamond", "D") - PLUS = ("plus", "+") - X = ("x", "x") +class MarkerStyleEnum( OptionSelectionEnum ): + NONE = ( "None", "" ) + POINT = ( "point", "." ) + CIRCLE = ( "circle", "o" ) + TRIANGLE = ( "triangle", "^" ) + SQUARE = ( "square", "s" ) + STAR = ( "star", "*" ) + DIAMOND = ( "diamond", "D" ) + PLUS = ( "plus", "+" ) + X = ( "x", "x" ) -def optionEnumToXml(enumObj: OptionSelectionEnum) -> str: +def optionEnumToXml( enumObj: OptionSelectionEnum ) -> str: """Creates an enumeration domain from an OptionSelectionEnum object. Dedicated to the dropdown widgets of paraview plugin. @@ -81,7 +82,7 @@ def optionEnumToXml(enumObj: OptionSelectionEnum) -> str: str: the XML string. 
""" xml: str = """""" - for i, unitObj in enumerate(list(enumObj)): # type: ignore[call-overload] + for i, unitObj in enumerate( list( enumObj ) ): # type: ignore[call-overload] xml += f"""""" xml += """""" return xml diff --git a/geos-pv/src/geos_pv/utils/DisplayOrganizationParaview.py b/geos-pv/src/geos_pv/utils/DisplayOrganizationParaview.py index 8e8f3f96..249ec4b8 100644 --- a/geos-pv/src/geos_pv/utils/DisplayOrganizationParaview.py +++ b/geos-pv/src/geos_pv/utils/DisplayOrganizationParaview.py @@ -5,16 +5,8 @@ from typing import Any from paraview.simple import ( # type: ignore[import-not-found] - AssignViewToLayout, - CreateLayout, - CreateView, - Delete, - GetLayoutByName, - GetLayouts, - GetViews, - GetViewsInLayout, - RemoveLayout, - SetActiveView, + AssignViewToLayout, CreateLayout, CreateView, Delete, GetLayoutByName, GetLayouts, GetViews, GetViewsInLayout, + RemoveLayout, SetActiveView, ) from typing_extensions import Self @@ -23,42 +15,42 @@ def buildNewLayoutWithPythonView() -> Any: # noqa: ANN401 """Create a new PythonView layout.""" # create a new layout organization: DisplayOrganizationParaview = DisplayOrganizationParaview() - layout_names: list[str] = organization.getLayoutsNames() - nb_layouts: int = len(layout_names) - layoutName: str = "Layout #" + str(nb_layouts + 1) + layout_names: list[ str ] = organization.getLayoutsNames() + nb_layouts: int = len( layout_names ) + layoutName: str = "Layout #" + str( nb_layouts + 1 ) # increment layout index until the layout name is a new one cpt: int = 1 while layoutName in layout_names: - layoutName = "Layout #" + str(nb_layouts + cpt) + layoutName = "Layout #" + str( nb_layouts + cpt ) cpt += 1 - organization.addLayout(layoutName) + organization.addLayout( layoutName ) # add a new python view to the layout - organization.addViewToLayout("PythonView", layoutName, 0) - return organization.getLayoutViews()[layoutName][0] + organization.addViewToLayout( "PythonView", layoutName, 0 ) + return 
organization.getLayoutViews()[ layoutName ][ 0 ] class DisplayOrganizationParaview: """Object to manage Paraview layouts.""" - def __init__(self: Self) -> None: + def __init__( self: Self ) -> None: """Keeps track of Paraview layouts and views when created or removed.""" - self._layouts_keys: list[Any] = [] - self._layout_names: list[str] = [] + self._layouts_keys: list[ Any ] = [] + self._layout_names: list[ str ] = [] self._views_cpt: int = 0 - self._layout_views: dict[str, Any] = {} - self._views_name: dict[str, Any] = {} + self._layout_views: dict[ str, Any ] = {} + self._views_name: dict[ str, Any ] = {} self.initLayouts() self.initLayoutViews() - def initLayouts(self: Self) -> None: + def initLayouts( self: Self ) -> None: """Initialize layouts.""" - self._layouts_keys = list(GetLayouts().keys()) + self._layouts_keys = list( GetLayouts().keys() ) self._layouts_names = [] for layout_tuple in self._layouts_keys: - self._layouts_names.append(layout_tuple[0]) + self._layouts_names.append( layout_tuple[ 0 ] ) - def getLayoutsKeys(self: Self) -> list[Any]: + def getLayoutsKeys( self: Self ) -> list[ Any ]: """Get layout keys. Returns: @@ -66,7 +58,7 @@ def getLayoutsKeys(self: Self) -> list[Any]: """ return self._layouts_keys - def getLayoutsNames(self: Self) -> list[str]: + def getLayoutsNames( self: Self ) -> list[ str ]: """Get layout names. Returns: @@ -74,15 +66,15 @@ def getLayoutsNames(self: Self) -> list[str]: """ return self._layouts_names - def getNumberLayouts(self: Self) -> int: + def getNumberLayouts( self: Self ) -> int: """Get the number of layouts. Returns: int: number of layouts. """ - return len(self._layouts_keys) + return len( self._layouts_keys ) - def getViewsCpt(self: Self) -> int: + def getViewsCpt( self: Self ) -> int: """Get the number of views. 
Returns: @@ -90,28 +82,28 @@ def getViewsCpt(self: Self) -> int: """ return self._views_cpt - def addOneToCpt(self: Self) -> None: + def addOneToCpt( self: Self ) -> None: """Increment number of views.""" self._views_cpt += 1 - def initLayoutViews(self: Self) -> None: + def initLayoutViews( self: Self ) -> None: """Initialize layout views.""" self._views_name = {} self._layout_views = {} - all_views: list[Any] = GetViews() - layouts_keys: list[Any] = self.getLayoutsKeys() - layout_names: list[str] = self.getLayoutsNames() - for i in range(self.getNumberLayouts()): - self._layout_views[layout_names[i]] = [] - views_in_layout = GetViewsInLayout(GetLayouts()[layouts_keys[i]]) + all_views: list[ Any ] = GetViews() + layouts_keys: list[ Any ] = self.getLayoutsKeys() + layout_names: list[ str ] = self.getLayoutsNames() + for i in range( self.getNumberLayouts() ): + self._layout_views[ layout_names[ i ] ] = [] + views_in_layout = GetViewsInLayout( GetLayouts()[ layouts_keys[ i ] ] ) for view in all_views: if view in views_in_layout: - self._layout_views[layout_names[i]].append(view) - name_view: str = "view" + str(self.getViewsCpt()) - self._views_name[name_view] = view + self._layout_views[ layout_names[ i ] ].append( view ) + name_view: str = "view" + str( self.getViewsCpt() ) + self._views_name[ name_view ] = view self.addOneToCpt() - def getLayoutViews(self: Self) -> dict[str, Any]: + def getLayoutViews( self: Self ) -> dict[ str, Any ]: """Get layout views. Returns: @@ -119,7 +111,7 @@ def getLayoutViews(self: Self) -> dict[str, Any]: """ return self._layout_views - def getViewsName(self: Self) -> dict[str, Any]: + def getViewsName( self: Self ) -> dict[ str, Any ]: """Get view names. 
Returns: @@ -127,41 +119,37 @@ def getViewsName(self: Self) -> dict[str, Any]: """ return self._views_name - def updateOrganization(self: Self) -> None: + def updateOrganization( self: Self ) -> None: """Update layouts.""" self._views_cpt = 0 self.initLayouts() self.initLayoutViews() - def addLayout(self: Self, new_layout_name: str) -> None: + def addLayout( self: Self, new_layout_name: str ) -> None: """Add a layout. Args: new_layout_name (str): name of the new layout. """ if new_layout_name not in self.getLayoutsNames(): - CreateLayout(new_layout_name) + CreateLayout( new_layout_name ) else: - print( - f'This layout name "{new_layout_name}" is already used, please pick a new one.\n' - ) + print( f'This layout name "{new_layout_name}" is already used, please pick a new one.\n' ) self.updateOrganization() - def removeLayout(self: Self, layout_name: str) -> None: + def removeLayout( self: Self, layout_name: str ) -> None: """Remove a layout. Args: layout_name (str): name of the layout to remove. """ if layout_name not in self.getLayoutsNames(): - RemoveLayout(GetLayoutByName(layout_name)) + RemoveLayout( GetLayoutByName( layout_name ) ) else: - print(f'This layout name "{layout_name}" does not exist.') + print( f'This layout name "{layout_name}" does not exist.' ) self.updateOrganization() - def addViewToLayout( - self: Self, viewType: str, layout_name: str, position: int - ) -> None: + def addViewToLayout( self: Self, viewType: str, layout_name: str, position: int ) -> None: """Add a view to a layout. Args: @@ -169,15 +157,13 @@ def addViewToLayout( layout_name (str): name of the layout. position (int): position of the view. 
""" - SetActiveView(None) - layout_to_use = GetLayoutByName(layout_name) - new_view = CreateView(viewType) - AssignViewToLayout(view=new_view, layout=layout_to_use, hint=position) + SetActiveView( None ) + layout_to_use = GetLayoutByName( layout_name ) + new_view = CreateView( viewType ) + AssignViewToLayout( view=new_view, layout=layout_to_use, hint=position ) self.updateOrganization() - def RemoveViewFromLayout( - self: Self, view_name: str, layout_name: str, position: int - ) -> None: + def RemoveViewFromLayout( self: Self, view_name: str, layout_name: str, position: int ) -> None: """Remove a view from a layout. Args: @@ -185,18 +171,16 @@ def RemoveViewFromLayout( layout_name (str): name of the layout. position (int): position of the view. """ - views_name: dict[str, Any] = self.getViewsName() - view_to_delete = views_name[view_name] - SetActiveView(view_to_delete) - Delete(view_to_delete) + views_name: dict[ str, Any ] = self.getViewsName() + view_to_delete = views_name[ view_name ] + SetActiveView( view_to_delete ) + Delete( view_to_delete ) del view_to_delete - layout_to_use = GetLayoutByName(layout_name) - layout_to_use.Collapse(position) + layout_to_use = GetLayoutByName( layout_name ) + layout_to_use.Collapse( position ) self.updateOrganization() - def SwapCellsInLayout( - self: Self, layout_name: str, position1: int, position2: int - ) -> None: + def SwapCellsInLayout( self: Self, layout_name: str, position1: int, position2: int ) -> None: """Swap views in a layout. Args: @@ -204,5 +188,5 @@ def SwapCellsInLayout( position1 (int): first position of the view. position2 (int): second position of the view. 
""" - layout_to_use = GetLayoutByName(layout_name) - layout_to_use.SwapCells(position1, position2) + layout_to_use = GetLayoutByName( layout_name ) + layout_to_use.SwapCells( position1, position2 ) diff --git a/geos-pv/src/geos_pv/utils/checkboxFunction.py b/geos-pv/src/geos_pv/utils/checkboxFunction.py index 7fce5261..6e0a250e 100644 --- a/geos-pv/src/geos_pv/utils/checkboxFunction.py +++ b/geos-pv/src/geos_pv/utils/checkboxFunction.py @@ -3,7 +3,7 @@ # SPDX-FileContributor: Alexandre Benedicto # ruff: noqa # type: ignore -def createModifiedCallback(anobject): +def createModifiedCallback( anobject ): """Helper for the creation and use of vtkDataArraySelection in ParaView. Args: @@ -11,10 +11,10 @@ def createModifiedCallback(anobject): """ import weakref - weakref_obj = weakref.ref(anobject) + weakref_obj = weakref.ref( anobject ) anobject = None - def _markmodified(*args, **kwars): + def _markmodified( *args, **kwars ): o = weakref_obj() if o is not None: o.Modified() diff --git a/geos-pv/src/geos_pv/utils/paraviewTreatments.py b/geos-pv/src/geos_pv/utils/paraviewTreatments.py index e8e15724..7aca71c3 100644 --- a/geos-pv/src/geos_pv/utils/paraviewTreatments.py +++ b/geos-pv/src/geos_pv/utils/paraviewTreatments.py @@ -9,15 +9,9 @@ import numpy.typing as npt import pandas as pd # type: ignore[import-untyped] from paraview.modules.vtkPVVTKExtensionsMisc import ( # type: ignore[import-not-found] - vtkMergeBlocks, -) + vtkMergeBlocks, ) from paraview.simple import ( # type: ignore[import-not-found] - FindSource, - GetActiveView, - GetAnimationScene, - GetDisplayProperties, - GetSources, - servermanager, + FindSource, GetActiveView, GetAnimationScene, GetDisplayProperties, GetSources, servermanager, ) import vtkmodules.util.numpy_support as vnp from vtkmodules.vtkCommonCore import ( @@ -47,10 +41,10 @@ # valid sources for Python view configurator # TODO: need to be consolidated -HARD_CODED_VALID_PVC_TYPE: set[str] = {"GeosLogReader", "RenameArrays"} 
+HARD_CODED_VALID_PVC_TYPE: set[ str ] = { "GeosLogReader", "RenameArrays" } -def vtkTableToDataframe(table: vtkTable) -> pd.DataFrame: +def vtkTableToDataframe( table: vtkTable ) -> pd.DataFrame: """From a vtkTable, creates and returns a pandas dataframe. Args: @@ -59,21 +53,21 @@ def vtkTableToDataframe(table: vtkTable) -> pd.DataFrame: Returns: pd.DataFrame: Pandas dataframe. """ - data: list[dict[str, Any]] = [] - for rowIndex in range(table.GetNumberOfRows()): - rowData: dict[str, Any] = {} - for colIndex in range(table.GetNumberOfColumns()): - colName: str = table.GetColumnName(colIndex) - cellValue: Any = table.GetValue(rowIndex, colIndex) + data: list[ dict[ str, Any ] ] = [] + for rowIndex in range( table.GetNumberOfRows() ): + rowData: dict[ str, Any ] = {} + for colIndex in range( table.GetNumberOfColumns() ): + colName: str = table.GetColumnName( colIndex ) + cellValue: Any = table.GetValue( rowIndex, colIndex ) # we have a vtkVariant value, we need a float cellValueF: float = cellValue.ToFloat() - rowData[colName] = cellValueF - data.append(rowData) - df: pd.DataFrame = pd.DataFrame(data) + rowData[ colName ] = cellValueF + data.append( rowData ) + df: pd.DataFrame = pd.DataFrame( data ) return df -def vtkPolyDataToPointsDataframe(polydata: vtkPolyData) -> pd.DataFrame: +def vtkPolyDataToPointsDataframe( polydata: vtkPolyData ) -> pd.DataFrame: """Creates a pandas dataframe containing points data from vtkPolyData. Args: @@ -85,37 +79,37 @@ def vtkPolyDataToPointsDataframe(polydata: vtkPolyData) -> pd.DataFrame: points: vtkPoints = polydata.GetPoints() assert points is not None, "Points is undefined." 
nbrPoints: int = points.GetNumberOfPoints() - data: dict[str, Any] = { - "Point ID": np.empty(nbrPoints), - "PointsX": np.empty(nbrPoints), - "PointsY": np.empty(nbrPoints), - "PointsZ": np.empty(nbrPoints), + data: dict[ str, Any ] = { + "Point ID": np.empty( nbrPoints ), + "PointsX": np.empty( nbrPoints ), + "PointsY": np.empty( nbrPoints ), + "PointsZ": np.empty( nbrPoints ), } - for pointID in range(nbrPoints): - point: tuple[float, float, float] = points.GetPoint(pointID) - data["Point ID"][pointID] = pointID - data["PointsX"][pointID] = point[0] - data["PointsY"][pointID] = point[1] - data["PointsZ"][pointID] = point[2] + for pointID in range( nbrPoints ): + point: tuple[ float, float, float ] = points.GetPoint( pointID ) + data[ "Point ID" ][ pointID ] = pointID + data[ "PointsX" ][ pointID ] = point[ 0 ] + data[ "PointsY" ][ pointID ] = point[ 1 ] + data[ "PointsZ" ][ pointID ] = point[ 2 ] pointData = polydata.GetPointData() nbrArrays: int = pointData.GetNumberOfArrays() - for i in range(nbrArrays): - arrayToUse = pointData.GetArray(i) - arrayName: str = pointData.GetArrayName(i) - subArrayNames: list[str] = findSubArrayNames(arrayToUse, arrayName) + for i in range( nbrArrays ): + arrayToUse = pointData.GetArray( i ) + arrayName: str = pointData.GetArrayName( i ) + subArrayNames: list[ str ] = findSubArrayNames( arrayToUse, arrayName ) # Collect the data for each sub array - for ind, name in enumerate(subArrayNames): - data[name] = np.empty(nbrPoints) - for k in range(nbrPoints): + for ind, name in enumerate( subArrayNames ): + data[ name ] = np.empty( nbrPoints ) + for k in range( nbrPoints ): # Every element of the tuple correspond to one distinct # sub array so we only need one value at a time - value: float = arrayToUse.GetTuple(k)[ind] - data[name][k] = value - df: pd.DataFrame = pd.DataFrame(data).set_index("Point ID") + value: float = arrayToUse.GetTuple( k )[ ind ] + data[ name ][ k ] = value + df: pd.DataFrame = pd.DataFrame( data ).set_index( 
"Point ID" ) return df -def vtkUnstructuredGridCellsToDataframe(grid: vtkUnstructuredGrid) -> pd.DataFrame: +def vtkUnstructuredGridCellsToDataframe( grid: vtkUnstructuredGrid ) -> pd.DataFrame: """Creates a pandas dataframe containing points data from vtkUnstructuredGrid. Args: @@ -127,28 +121,28 @@ def vtkUnstructuredGridCellsToDataframe(grid: vtkUnstructuredGrid) -> pd.DataFra cellIdAttributeName = GeosMeshOutputsEnum.VTK_ORIGINAL_CELL_ID.attributeName cellData = grid.GetCellData() numberCells: int = grid.GetNumberOfCells() - data: dict[str, Any] = {} - for i in range(cellData.GetNumberOfArrays()): - arrayToUse = cellData.GetArray(i) - arrayName: str = cellData.GetArrayName(i) - subArrayNames: list[str] = findSubArrayNames(arrayToUse, arrayName) + data: dict[ str, Any ] = {} + for i in range( cellData.GetNumberOfArrays() ): + arrayToUse = cellData.GetArray( i ) + arrayName: str = cellData.GetArrayName( i ) + subArrayNames: list[ str ] = findSubArrayNames( arrayToUse, arrayName ) # Collect the data for each sub array - for ind, name in enumerate(subArrayNames): - data[name] = np.empty(numberCells) - for k in range(numberCells): + for ind, name in enumerate( subArrayNames ): + data[ name ] = np.empty( numberCells ) + for k in range( numberCells ): # Every element of the tuple correspond to one distinct # sub array so we only need one value at a time - value: float = arrayToUse.GetTuple(k)[ind] - data[name][k] = value - df: pd.DataFrame = pd.DataFrame(data).astype({cellIdAttributeName: int}) + value: float = arrayToUse.GetTuple( k )[ ind ] + data[ name ][ k ] = value + df: pd.DataFrame = pd.DataFrame( data ).astype( { cellIdAttributeName: int } ) # set cell ids as index # df = df.astype({cellIdAttributeName: int}) - return df.set_index(cellIdAttributeName) + return df.set_index( cellIdAttributeName ) -def vtkToDataframe(dataset: vtkDataObject) -> pd.DataFrame: +def vtkToDataframe( dataset: vtkDataObject ) -> pd.DataFrame: """Creates a dataframe containing points 
data from vtkTable or vtkPolyData. Args: @@ -157,20 +151,18 @@ def vtkToDataframe(dataset: vtkDataObject) -> pd.DataFrame: Returns: pd.DataFrame: if the dataset is in the right format. """ - if isinstance(dataset, vtkTable): - return vtkTableToDataframe(dataset) - elif isinstance(dataset, vtkPolyData): - return vtkPolyDataToPointsDataframe(dataset) - elif isinstance(dataset, vtkUnstructuredGrid): - return vtkUnstructuredGridCellsToDataframe(dataset) + if isinstance( dataset, vtkTable ): + return vtkTableToDataframe( dataset ) + elif isinstance( dataset, vtkPolyData ): + return vtkPolyDataToPointsDataframe( dataset ) + elif isinstance( dataset, vtkUnstructuredGrid ): + return vtkUnstructuredGridCellsToDataframe( dataset ) else: - raise AssertionError( - f"Invalid dataset format {type(dataset)}. " - + "Supported formats are: vtkTable, vtkpolyData and vtkUnstructuredGrid" - ) + raise AssertionError( f"Invalid dataset format {type(dataset)}. " + + "Supported formats are: vtkTable, vtkpolyData and vtkUnstructuredGrid" ) -def findSubArrayNames(vtkArray: vtkDataArray, arrayName: str) -> list[str]: +def findSubArrayNames( vtkArray: vtkDataArray, arrayName: str ) -> list[ str ]: """Get sub array names from multi array attributes. 
Because arrays in ParaView can be of multiple dimensions, @@ -189,23 +181,21 @@ def findSubArrayNames(vtkArray: vtkDataArray, arrayName: str) -> list[str]: # The ordering of six elements can seem odd but is adapted to # Geos output format of stress as : # sigma11, sigma22, sigma33, sigma23, sigma13, sigma12 - sixComponents: tuple[str, str, str, str, str, str] = ComponentNameEnum.XYZ.value + sixComponents: tuple[ str, str, str, str, str, str ] = ComponentNameEnum.XYZ.value nbrComponents: int = vtkArray.GetNumberOfComponents() - subArrayNames: list[str] = [] + subArrayNames: list[ str ] = [] if nbrComponents == 1: - subArrayNames.append(arrayName) + subArrayNames.append( arrayName ) elif nbrComponents < 6: - for j in range(nbrComponents): - subArrayNames.append(arrayName + "_" + sixComponents[j]) + for j in range( nbrComponents ): + subArrayNames.append( arrayName + "_" + sixComponents[ j ] ) else: - for j in range(nbrComponents): - subArrayNames.append(arrayName + "_" + str(j)) + for j in range( nbrComponents ): + subArrayNames.append( arrayName + "_" + str( j ) ) return subArrayNames -def getDataframesFromMultipleVTKSources( - sourceNames: set[str], commonColumn: str -) -> list[pd.DataFrame]: +def getDataframesFromMultipleVTKSources( sourceNames: set[ str ], commonColumn: str ) -> list[ pd.DataFrame ]: """Creates the dataframe from each source if they have the commonColumn. Args: @@ -217,32 +207,25 @@ def getDataframesFromMultipleVTKSources( """ # indexSource: int = commonColumn.rfind("__") # commonColumnNoSource: str = commonColumn[:indexSource] - validDataframes: list[pd.DataFrame] = [] + validDataframes: list[ pd.DataFrame ] = [] for name in sourceNames: - source = FindSource(name) + source = FindSource( name ) assert source is not None, "Source is undefined." - dataset = servermanager.Fetch(source) + dataset = servermanager.Fetch( source ) assert dataset is not None, "Dataset is undefined." 
- currentDF: pd.DataFrame = vtkToDataframe(dataset) + currentDF: pd.DataFrame = vtkToDataframe( dataset ) if commonColumn in currentDF.columns: dfModified = currentDF.rename( - columns={ - col: col + "__" + name - for col in currentDF.columns - if col != commonColumn - } - ) - validDataframes.append(dfModified) + columns={ col: col + "__" + name + for col in currentDF.columns if col != commonColumn } ) + validDataframes.append( dfModified ) else: - print( - f"The source <<{name}>> could not be used" - + " to plot because the variable named <<" - + f"{commonColumn}>> could not be found." - ) + print( f"The source <<{name}>> could not be used" + " to plot because the variable named <<" + + f"{commonColumn}>> could not be found." ) return validDataframes -def mergeDataframes(dataframes: list[pd.DataFrame], commonColumn: str) -> pd.DataFrame: +def mergeDataframes( dataframes: list[ pd.DataFrame ], commonColumn: str ) -> pd.DataFrame: """Merge all dataframes into a single one by using the common column. Args: @@ -255,22 +238,20 @@ def mergeDataframes(dataframes: list[pd.DataFrame], commonColumn: str) -> pd.Dat pd.DataFrame: Merged dataframes into a single one by 'outer' on the commonColumn. 
""" - assert len(dataframes) > 0 - if len(dataframes) == 1: - return dataframes[0] + assert len( dataframes ) > 0 + if len( dataframes ) == 1: + return dataframes[ 0 ] else: - df0: pd.DataFrame = dataframes[0] - df1: pd.DataFrame = dataframes[1] - merged: pd.DataFrame = df0.merge(df1, on=commonColumn, how="outer") - if len(dataframes) > 2: - for df in dataframes[2:]: - merged = merged.merge(df, on=commonColumn, how="outer") + df0: pd.DataFrame = dataframes[ 0 ] + df1: pd.DataFrame = dataframes[ 1 ] + merged: pd.DataFrame = df0.merge( df1, on=commonColumn, how="outer" ) + if len( dataframes ) > 2: + for df in dataframes[ 2: ]: + merged = merged.merge( df, on=commonColumn, how="outer" ) return merged -def addDataframeColumnsToVtkPolyData( - polyData: vtkPolyData, df: pd.DataFrame -) -> vtkPolyData: +def addDataframeColumnsToVtkPolyData( polyData: vtkPolyData, df: pd.DataFrame ) -> vtkPolyData: """Add columns from a dataframe to a vtkPolyData. Args: @@ -281,13 +262,13 @@ def addDataframeColumnsToVtkPolyData( vtkPolyData: vtkPolyData with new arrays. """ for column_name in df.columns: - column = df[column_name].values + column = df[ column_name ].values array = vtkDoubleArray() - array.SetName(column_name) - array.SetNumberOfValues(polyData.GetNumberOfPoints()) - for i in range(polyData.GetNumberOfPoints()): - array.SetValue(i, column[i]) - polyData.GetPointData().AddArray(array) + array.SetName( column_name ) + array.SetNumberOfValues( polyData.GetNumberOfPoints() ) + for i in range( polyData.GetNumberOfPoints() ): + array.SetValue( i, column[ i ] ) + polyData.GetPointData().AddArray( array ) # Update vtkPolyData object polyData.GetPointData().Modified() @@ -298,7 +279,7 @@ def addDataframeColumnsToVtkPolyData( # Functions to help the processing of PythonViewConfigurator -def getPossibleSourceNames() -> set[str]: +def getPossibleSourceNames() -> set[ str ]: """Get the list of valid source names for PythonViewConfigurator. 
In PythonViewConfigurator, multiple sources can be considered as @@ -310,18 +291,18 @@ def getPossibleSourceNames() -> set[str]: set[str]: Source names in the paraview pipeline. """ # get all sources different from PythonViewConfigurator - validNames: set[str] = set() + validNames: set[ str ] = set() for k in GetSources(): - sourceName: str = k[0] - source = FindSource(sourceName) - if (source is not None) and ("PythonViewConfigurator" not in source.__str__()): - dataset = servermanager.Fetch(source) - if dataset.IsA("vtkPolyData") or dataset.IsA("vtkTable"): - validNames.add(sourceName) + sourceName: str = k[ 0 ] + source = FindSource( sourceName ) + if ( source is not None ) and ( "PythonViewConfigurator" not in source.__str__() ): + dataset = servermanager.Fetch( source ) + if dataset.IsA( "vtkPolyData" ) or dataset.IsA( "vtkTable" ): + validNames.add( sourceName ) return validNames -def usefulSourceNamesPipeline() -> set[str]: +def usefulSourceNamesPipeline() -> set[ str ]: """Get the list of valid pipelines for PythonViewConfigurator. When using the PythonViewConfigurator, we want to check if the sources @@ -333,16 +314,16 @@ def usefulSourceNamesPipeline() -> set[str]: Returns: set[str]: [sourceName1, ..., sourceNameN] """ - usefulSourceNames: set[str] = set() - allSourceNames: set[str] = {n[0] for n, s in GetSources().items()} + usefulSourceNames: set[ str ] = set() + allSourceNames: set[ str ] = { n[ 0 ] for n, s in GetSources().items() } for name in allSourceNames: - source = FindSource(name) - if type(source).__name__ in HARD_CODED_VALID_PVC_TYPE: - usefulSourceNames.add(name) + source = FindSource( name ) + if type( source ).__name__ in HARD_CODED_VALID_PVC_TYPE: + usefulSourceNames.add( name ) return usefulSourceNames -def getDatasFromSources(sourceNames: set[str]) -> dict[str, pd.DataFrame]: +def getDatasFromSources( sourceNames: set[ str ] ) -> dict[ str, pd.DataFrame ]: """Get the data from input sources. 
Args: @@ -355,14 +336,14 @@ def getDatasFromSources(sourceNames: set[str]) -> dict[str, pd.DataFrame]: ... sourceNameN: servermanager.Fetch(FindSource(sourceNameN)) } """ - usefulDatas: dict[str, Any] = {} + usefulDatas: dict[ str, Any ] = {} for name in sourceNames: - dataset = servermanager.Fetch(FindSource(name)) - usefulDatas[name] = dataset + dataset = servermanager.Fetch( FindSource( name ) ) + usefulDatas[ name ] = dataset return usefulDatas -def usefulVisibleDatasPipeline() -> dict[str, Any]: +def usefulVisibleDatasPipeline() -> dict[ str, Any ]: """Get the list of visible pipelines. When using the PythonViewConfigurator, we want to collect the data of @@ -381,22 +362,22 @@ def usefulVisibleDatasPipeline() -> dict[str, Any]: ... sourceNameN: servermanager.Fetch(FindSource(sourceNameN)) } """ - usefulDatas: dict[str, Any] = {} - sourceNamesVisible: set[str] = set() + usefulDatas: dict[ str, Any ] = {} + sourceNamesVisible: set[ str ] = set() for n, s in GetSources().items(): - if servermanager.GetRepresentation(s, GetActiveView()) is not None: - displayProperties = GetDisplayProperties(s, view=GetActiveView()) - if (displayProperties is not None) and (displayProperties.Visibility == 1): - sourceNamesVisible.add(n[0]) + if servermanager.GetRepresentation( s, GetActiveView() ) is not None: + displayProperties = GetDisplayProperties( s, view=GetActiveView() ) + if ( displayProperties is not None ) and ( displayProperties.Visibility == 1 ): + sourceNamesVisible.add( n[ 0 ] ) for name in sourceNamesVisible: - source = FindSource(name) - if type(source).__name__ in HARD_CODED_VALID_PVC_TYPE: - usefulDatas[name] = servermanager.Fetch(FindSource(name)) + source = FindSource( name ) + if type( source ).__name__ in HARD_CODED_VALID_PVC_TYPE: + usefulDatas[ name ] = servermanager.Fetch( FindSource( name ) ) return usefulDatas -def isFilter(sourceName: str) -> bool: +def isFilter( sourceName: str ) -> bool: """Identify if a source name can link to a filter in the 
ParaView pipeline. Args: @@ -405,9 +386,9 @@ def isFilter(sourceName: str) -> bool: Returns: bool: True if filter, False instead. """ - source: Any = FindSource(sourceName) + source: Any = FindSource( sourceName ) if source is None: - print(f"sourceName <<{sourceName}>> does not exist in the pipeline") + print( f"sourceName <<{sourceName}>> does not exist in the pipeline" ) return False else: try: @@ -417,7 +398,7 @@ def isFilter(sourceName: str) -> bool: return False -def getFilterInput(sourceName: str) -> vtkDataObject: +def getFilterInput( sourceName: str ) -> vtkDataObject: """Access the vtk dataset that is used as input for a filter. Args: @@ -426,19 +407,19 @@ def getFilterInput(sourceName: str) -> vtkDataObject: Returns: Any: The vtk dataset that serves as input for the filter. """ - filtre = FindSource(sourceName) + filtre = FindSource( sourceName ) assert filtre is not None, "Source is undefined." clientSideObject = filtre.GetClientSideObject() assert clientSideObject is not None, "Client Side Object is undefined." inputAlgo = clientSideObject.GetInputAlgorithm() assert inputAlgo is not None, "Input Algorithm is undefined." inputValues = inputAlgo.GetInput() - if isinstance(inputValues, vtkDataObject): + if isinstance( inputValues, vtkDataObject ): return inputValues return vtkDataObject() -def getArrayChoices(array: vtkDataArraySelection) -> list[str]: +def getArrayChoices( array: vtkDataArraySelection ) -> list[ str ]: """Extracts the column names of input array when they are enabled. 
Args: @@ -447,15 +428,15 @@ def getArrayChoices(array: vtkDataArraySelection) -> list[str]: Returns: set[str]: [columnName1, ..., columnNameN] """ - checkedColumns: list[str] = [] - for i in range(array.GetNumberOfArrays()): - columnName: str = array.GetArrayName(i) - if array.ArrayIsEnabled(columnName): - checkedColumns.append(columnName) + checkedColumns: list[ str ] = [] + for i in range( array.GetNumberOfArrays() ): + columnName: str = array.GetArrayName( i ) + if array.ArrayIsEnabled( columnName ): + checkedColumns.append( columnName ) return checkedColumns -def integrateSourceNames(sourceNames: set[str], arrayChoices: set[str]) -> set[str]: +def integrateSourceNames( sourceNames: set[ str ], arrayChoices: set[ str ] ) -> set[ str ]: """Aggregate source and arrayChoices names. When creating the user choices in PythonViewConfigurator, you need @@ -471,17 +452,15 @@ def integrateSourceNames(sourceNames: set[str], arrayChoices: set[str]) -> set[s set[str]: [sourceName1__choice1, sourceName1__choice2, ..., sourceNameN__choiceN] """ - completeNames: set[str] = set() + completeNames: set[ str ] = set() for sourceName in sourceNames: for choice in arrayChoices: completeName: str = choice + "__" + sourceName - completeNames.add(completeName) + completeNames.add( completeName ) return completeNames -def getVtkOriginalCellIds( - mesh: Union[vtkMultiBlockDataSet, vtkCompositeDataSet, vtkDataObject] -) -> list[str]: +def getVtkOriginalCellIds( mesh: Union[ vtkMultiBlockDataSet, vtkCompositeDataSet, vtkDataObject ] ) -> list[ str ]: """Get vtkOriginalCellIds from a vtkUnstructuredGrid object. Args: @@ -491,18 +470,18 @@ def getVtkOriginalCellIds( list[str]: ids of the cells. 
""" # merge blocks for vtkCompositeDataSet - mesh2: vtkUnstructuredGrid = mergeFilterPV(mesh) + mesh2: vtkUnstructuredGrid = mergeFilterPV( mesh ) attributeName: str = GeosMeshOutputsEnum.VTK_ORIGINAL_CELL_ID.attributeName data: vtkCellData = mesh2.GetCellData() assert data is not None, "Cell Data are undefined." - assert bool(data.HasArray(attributeName)), f"Attribute {attributeName} is not in the mesh" + assert bool( data.HasArray( attributeName ) ), f"Attribute {attributeName} is not in the mesh" - array: vtkDoubleArray = data.GetArray(attributeName) - nparray: npt.NDArray[np.float64] = vnp.vtk_to_numpy(array) # type: ignore[no-untyped-call] - return [str(int(ide)) for ide in nparray] + array: vtkDoubleArray = data.GetArray( attributeName ) + nparray: npt.NDArray[ np.float64 ] = vnp.vtk_to_numpy( array ) # type: ignore[no-untyped-call] + return [ str( int( ide ) ) for ide in nparray ] -def strEnumToEnumerationDomainXml(enumObj: Enum) -> str: +def strEnumToEnumerationDomainXml( enumObj: Enum ) -> str: """Creates an enumeration domain from an Enum objec. Creates an enumeration domain from an Enum objec @@ -515,13 +494,13 @@ def strEnumToEnumerationDomainXml(enumObj: Enum) -> str: str: the XML string. """ xml: str = """""" - for i, unitObj in enumerate(list(enumObj)): # type: ignore[call-overload] + for i, unitObj in enumerate( list( enumObj ) ): # type: ignore[call-overload] xml += f"""""" xml += """""" return xml -def strListToEnumerationDomainXml(properties: Union[list[str], set[str]]) -> str: +def strListToEnumerationDomainXml( properties: Union[ list[ str ], set[ str ] ] ) -> str: """Creates an enumeration domain from a list of strings. Creates an enumeration domain from a list of strings @@ -534,13 +513,13 @@ def strListToEnumerationDomainXml(properties: Union[list[str], set[str]]) -> str str: the XML string. 
""" xml: str = """""" - for i, prop in enumerate(list(properties)): + for i, prop in enumerate( list( properties ) ): xml += f"""""" xml += """""" return xml -def dataframeForEachTimestep(sourceName: str) -> dict[str, pd.DataFrame]: +def dataframeForEachTimestep( sourceName: str ) -> dict[ str, pd.DataFrame ]: """Get the data from source at each time step. In ParaView, a source object can contain data for multiple @@ -559,24 +538,24 @@ def dataframeForEachTimestep(sourceName: str) -> dict[str, pd.DataFrame]: assert animationScene is not None, "animationScene is undefined." # we set the animation to the initial timestep animationScene.GoToFirst() - source = FindSource(sourceName) - dataset: vtkDataObject = servermanager.Fetch(source) + source = FindSource( sourceName ) + dataset: vtkDataObject = servermanager.Fetch( source ) assert dataset is not None, f"Dataset is undefined." - dataset2: vtkUnstructuredGrid = mergeFilterPV(dataset) - time: str = str(animationScene.TimeKeeper.Time) - dfPerTimestep: dict[str, pd.DataFrame] = {time: vtkToDataframe(dataset2)} + dataset2: vtkUnstructuredGrid = mergeFilterPV( dataset ) + time: str = str( animationScene.TimeKeeper.Time ) + dfPerTimestep: dict[ str, pd.DataFrame ] = { time: vtkToDataframe( dataset2 ) } # then we iterate on the other timesteps of the source - for _ in range(animationScene.NumberOfFrames): # type: ignore + for _ in range( animationScene.NumberOfFrames ): # type: ignore animationScene.GoToNext() - source = FindSource(sourceName) - dataset = servermanager.Fetch(source) - dataset2 = mergeFilterPV(dataset) - time = str(animationScene.TimeKeeper.Time) - dfPerTimestep[time] = vtkToDataframe(dataset2) + source = FindSource( sourceName ) + dataset = servermanager.Fetch( source ) + dataset2 = mergeFilterPV( dataset ) + time = str( animationScene.TimeKeeper.Time ) + dfPerTimestep[ time ] = vtkToDataframe( dataset2 ) return dfPerTimestep -def getTimeStepIndex(time: float, timeSteps: npt.NDArray[np.float64]) -> int: 
+def getTimeStepIndex( time: float, timeSteps: npt.NDArray[ np.float64 ] ) -> int: """Get the time step index of input time from the list of time steps. Args: @@ -586,16 +565,12 @@ def getTimeStepIndex(time: float, timeSteps: npt.NDArray[np.float64]) -> int: Returns: int: time step index """ - indexes: npt.NDArray[np.int64] = np.where(np.isclose(timeSteps, time))[0] - assert ( - indexes.size > 0 - ), f"Current time {time} does not exist in the selected object." - return int(indexes[0]) + indexes: npt.NDArray[ np.int64 ] = np.where( np.isclose( timeSteps, time ) )[ 0 ] + assert ( indexes.size > 0 ), f"Current time {time} does not exist in the selected object." + return int( indexes[ 0 ] ) -def mergeFilterPV( - input: vtkDataObject, -) -> vtkUnstructuredGrid: +def mergeFilterPV( input: vtkDataObject, ) -> vtkUnstructuredGrid: """Apply Paraview merge block filter. Args: @@ -607,6 +582,6 @@ def mergeFilterPV( """ mergeFilter: vtkMergeBlocks = vtkMergeBlocks() - mergeFilter.SetInputData(input) + mergeFilter.SetInputData( input ) mergeFilter.Update() - return mergeFilter.GetOutputDataObject(0) + return mergeFilter.GetOutputDataObject( 0 ) From 341613c4dd3bacef91394670fb5e5c5f17ec84fe Mon Sep 17 00:00:00 2001 From: mlemayTTE Date: Thu, 3 Apr 2025 15:56:12 +0200 Subject: [PATCH 04/20] update doc --- docs/conf.py | 4 +-- docs/geos-pv.rst | 12 +++++++ docs/geos_pv_docs/PVplugins.rst | 12 +++++++ docs/geos_pv_docs/geosLogReaderUtils.rst | 45 ++++++++++++++++++++++++ docs/geos_pv_docs/home.rst | 19 ++++++++++ docs/geos_pv_docs/modules.rst | 11 ++++++ docs/geos_pv_docs/pyplotUtils.rst | 13 +++++++ docs/geos_pv_docs/utils.rst | 29 +++++++++++++++ 8 files changed, 143 insertions(+), 2 deletions(-) create mode 100644 docs/geos-pv.rst create mode 100644 docs/geos_pv_docs/PVplugins.rst create mode 100644 docs/geos_pv_docs/geosLogReaderUtils.rst create mode 100644 docs/geos_pv_docs/home.rst create mode 100644 docs/geos_pv_docs/modules.rst create mode 100644 
docs/geos_pv_docs/pyplotUtils.rst create mode 100644 docs/geos_pv_docs/utils.rst diff --git a/docs/conf.py b/docs/conf.py index 8e22d041..ce808511 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -18,7 +18,7 @@ # Add python modules to be documented python_root = '..' python_modules = ( 'geos-ats', 'geos-mesh', 'geos-posp', 'geos-timehistory', 'geos-utils', 'geos-xml-tools', - 'hdf5-wrapper', 'pygeos-tools' ) + 'hdf5-wrapper', 'pygeos-tools', 'geos-pv' ) for m in python_modules: sys.path.insert( 0, os.path.abspath( os.path.join( python_root, m, 'src' ) ) ) @@ -47,7 +47,7 @@ 'sphinx.ext.todo', 'sphinx.ext.viewcode' ] autoclass_content = 'both' -autodoc_mock_imports = [ "ats", "h5py", "lxml", "paraview", "pygeosx", "pylvarray", "meshio", "mpi4py", "scipy" ] +autodoc_mock_imports = [ "ats", "h5py", "lxml", "paraview", "pygeosx", "pylvarray", "meshio", "mpi4py", "scipy", "pandas", "matplotlib" ] autodoc_typehints = 'none' autodoc_typehints_format = 'short' suppress_warnings = [""] diff --git a/docs/geos-pv.rst b/docs/geos-pv.rst new file mode 100644 index 00000000..4fbc70dd --- /dev/null +++ b/docs/geos-pv.rst @@ -0,0 +1,12 @@ +GEOS Paraview plugins +===================== + +.. toctree:: + :maxdepth: 5 + :caption: Contents: + + ./geos_pv_docs/home.rst + + ./geos_pv_docs/modules.rst + + ./geos_pv_docs/PVplugins.rst \ No newline at end of file diff --git a/docs/geos_pv_docs/PVplugins.rst b/docs/geos_pv_docs/PVplugins.rst new file mode 100644 index 00000000..a46fd982 --- /dev/null +++ b/docs/geos_pv_docs/PVplugins.rst @@ -0,0 +1,12 @@ +Paraview plugins +================ + +This package contains Paraview plugins that can be loaded in Paraview. + +PVplugins.PVGeosLogReader module +--------------------------------------------- + +.. 
automodule:: PVplugins.PVGeosLogReader + :members: + :undoc-members: + :show-inheritance: \ No newline at end of file diff --git a/docs/geos_pv_docs/geosLogReaderUtils.rst b/docs/geos_pv_docs/geosLogReaderUtils.rst new file mode 100644 index 00000000..181d8c34 --- /dev/null +++ b/docs/geos_pv_docs/geosLogReaderUtils.rst @@ -0,0 +1,45 @@ +GeosLogReaderUtils functions +============================ + +This package define functions dedicated to the GeosLogReader. + + +geos.pv.GeosLogReaderUtils.GeosLogReaderAquifers module +------------------------------------------------------------------- + +.. automodule:: geos.pv.GeosLogReaderUtils.GeosLogReaderAquifers + :members: + :undoc-members: + :show-inheritance: + +geos.pv.GeosLogReaderUtils.GeosLogReaderConvergence module +--------------------------------------------------------------------- + +.. automodule:: geos.pv.GeosLogReaderUtils.GeosLogReaderConvergence + :members: + :undoc-members: + :show-inheritance: + +geos.pv.GeosLogReaderUtils.GeosLogReaderFlow module +--------------------------------------------------------------- + +.. automodule:: geos.pv.GeosLogReaderUtils.GeosLogReaderFlow + :members: + :undoc-members: + :show-inheritance: + +geos.pv.GeosLogReaderUtils.GeosLogReaderFunctions module +-------------------------------------------------------------------- + +.. automodule:: geos.pv.GeosLogReaderUtils.GeosLogReaderFunctions + :members: + :undoc-members: + :show-inheritance: + +geos.pv.GeosLogReaderUtils.GeosLogReaderWells module +-------------------------------------------------------------------- + +.. 
automodule:: geos.pv.GeosLogReaderUtils.GeosLogReaderWells + :members: + :undoc-members: + :show-inheritance: \ No newline at end of file diff --git a/docs/geos_pv_docs/home.rst b/docs/geos_pv_docs/home.rst new file mode 100644 index 00000000..1e42ae41 --- /dev/null +++ b/docs/geos_pv_docs/home.rst @@ -0,0 +1,19 @@ +Home +==== + +**geos-pv** is a Python package that gathers `Paraview `_ plugins of GEOS python tools. + +It includes: + +* a reader able to parse the GEOS output log (before commit version #9365098) to collect data and display them as tables; +* tools to clean and check GEOS input mesh; +* tools to clean GEOS output mesh; +* tools to compute additional geomechanical properties from GEOS outputs; +* tools to display Mohr's circles at a given time step and the evolution through time from GEOS outputs. + +The packages can be loaded into Paraview using the Plugin Manager from `Tools > Plugin Manager`. On success, you will +see the selected plugin in the `Filters`` menu (see `Paraview documentation `. + +Alternatively, geos-pv package can be build together with Paraview ([see Paraview compilation guide](https://gitlab.kitware.com/paraview/paraview/-/blob/master/Documentation/dev/build.md)). +It is recommended to use Paraview v5.12+, which is based on python 3.10+. If you need to build geos-pv package with the paraview dependency, use the command: +`pip install Path/To/geosPythonPackages/geos-pv[paraview]` diff --git a/docs/geos_pv_docs/modules.rst b/docs/geos_pv_docs/modules.rst new file mode 100644 index 00000000..2a9bdca1 --- /dev/null +++ b/docs/geos_pv_docs/modules.rst @@ -0,0 +1,11 @@ +Processing +========== + +.. 
toctree:: + :maxdepth: 5 + + geosLogReaderUtils + + pyplotUtils + + utils \ No newline at end of file diff --git a/docs/geos_pv_docs/pyplotUtils.rst b/docs/geos_pv_docs/pyplotUtils.rst new file mode 100644 index 00000000..6b2b36c4 --- /dev/null +++ b/docs/geos_pv_docs/pyplotUtils.rst @@ -0,0 +1,13 @@ +pyplotUtils functions +===================== + +This package define options for matplotlib. + + +geos.pv.pyplotUtils.matplotlibOptions module +--------------------------------------------- + +.. automodule:: geos.pv.pyplotUtils.matplotlibOptions + :members: + :undoc-members: + :show-inheritance: \ No newline at end of file diff --git a/docs/geos_pv_docs/utils.rst b/docs/geos_pv_docs/utils.rst new file mode 100644 index 00000000..e0a3982a --- /dev/null +++ b/docs/geos_pv_docs/utils.rst @@ -0,0 +1,29 @@ +Utilities +========= + +This package defines utilities for Paraview plugins. + + +geos.pv.utils.checkboxFunction module +--------------------------------------------- + +.. automodule:: geos.pv.utils.checkboxFunction + :members: + :undoc-members: + :show-inheritance: + +geos.pv.utils.DisplayOrganizationParaview module +--------------------------------------------- + +.. automodule:: geos.pv.utils.DisplayOrganizationParaview + :members: + :undoc-members: + :show-inheritance: + +geos.pv.utils.paraviewTreatments module +--------------------------------------------- + +.. 
automodule:: geos.pv.utils.paraviewTreatments + :members: + :undoc-members: + :show-inheritance: \ No newline at end of file From 8b417e3e38ab4bdb1d1a7e74639596589dea48bc Mon Sep 17 00:00:00 2001 From: mlemayTTE Date: Thu, 3 Apr 2025 15:57:15 +0200 Subject: [PATCH 05/20] fix doc --- docs/geos_posp_docs/PVplugins.rst | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/docs/geos_posp_docs/PVplugins.rst b/docs/geos_posp_docs/PVplugins.rst index 57687c6a..86f7261e 100644 --- a/docs/geos_posp_docs/PVplugins.rst +++ b/docs/geos_posp_docs/PVplugins.rst @@ -87,13 +87,6 @@ PVplugins.PVGeomechanicsWorkflowVolumeWell module :members: :undoc-members: -PVplugins.PVGeosLogReader module --------------------------------- - -.. automodule:: PVplugins.PVGeosLogReader - :members: - :undoc-members: - PVplugins.PVMergeBlocksEnhanced module -------------------------------------- @@ -108,13 +101,6 @@ PVplugins.PVMohrCirclePlot module :members: :undoc-members: -PVplugins.PVPythonViewConfigurator module ------------------------------------------ - -.. 
automodule:: PVplugins.PVPythonViewConfigurator - :members: - :undoc-members: - PVplugins.PVSurfaceGeomechanics module -------------------------------------- From 078808e8aa26a566bc4a6b359fd440fd6347ca0b Mon Sep 17 00:00:00 2001 From: mlemayTTE Date: Thu, 3 Apr 2025 16:00:37 +0200 Subject: [PATCH 06/20] update project config --- geos-pv/pyproject.toml | 8 +++----- geos-pv/requirements.txt | 10 +++++----- geos-pv/setup.py | 18 +++++++++++------- geos-pv/src/{geos_pv => geos/pv}/__init__.py | 0 .../GeosLogReaderAquifers.py | 4 ++-- .../GeosLogReaderConvergence.py | 4 ++-- .../geosLogReaderUtils/GeosLogReaderFlow.py | 4 ++-- .../geosLogReaderUtils/GeosLogReaderWells.py | 4 ++-- .../pv}/geosLogReaderUtils/__init__.py | 0 .../geosLogReaderFunctions.py | 2 +- .../pv}/pyplotUtils/__init__.py | 0 .../pv}/pyplotUtils/matplotlibOptions.py | 0 .../pv}/utils/DisplayOrganizationParaview.py | 0 .../src/{geos_pv => geos/pv}/utils/__init__.py | 0 .../pv}/utils/checkboxFunction.py | 0 .../pv}/utils/paraviewTreatments.py | 0 geos-pv/src/geos_pv/py.typed | 0 17 files changed, 28 insertions(+), 26 deletions(-) rename geos-pv/src/{geos_pv => geos/pv}/__init__.py (100%) rename geos-pv/src/{geos_pv => geos/pv}/geosLogReaderUtils/GeosLogReaderAquifers.py (98%) rename geos-pv/src/{geos_pv => geos/pv}/geosLogReaderUtils/GeosLogReaderConvergence.py (98%) rename geos-pv/src/{geos_pv => geos/pv}/geosLogReaderUtils/GeosLogReaderFlow.py (96%) rename geos-pv/src/{geos_pv => geos/pv}/geosLogReaderUtils/GeosLogReaderWells.py (99%) rename geos-pv/src/{geos_pv => geos/pv}/geosLogReaderUtils/__init__.py (100%) rename geos-pv/src/{geos_pv => geos/pv}/geosLogReaderUtils/geosLogReaderFunctions.py (97%) rename geos-pv/src/{geos_pv => geos/pv}/pyplotUtils/__init__.py (100%) rename geos-pv/src/{geos_pv => geos/pv}/pyplotUtils/matplotlibOptions.py (100%) rename geos-pv/src/{geos_pv => geos/pv}/utils/DisplayOrganizationParaview.py (100%) rename geos-pv/src/{geos_pv => geos/pv}/utils/__init__.py (100%) rename 
geos-pv/src/{geos_pv => geos/pv}/utils/checkboxFunction.py (100%) rename geos-pv/src/{geos_pv => geos/pv}/utils/paraviewTreatments.py (100%) delete mode 100644 geos-pv/src/geos_pv/py.typed diff --git a/geos-pv/pyproject.toml b/geos-pv/pyproject.toml index 1b60faac..96e7f14b 100644 --- a/geos-pv/pyproject.toml +++ b/geos-pv/pyproject.toml @@ -20,11 +20,9 @@ requires-python = ">=3.10" dynamic = ["dependencies"] -dependencies = [ - "typing_extensions", - "pandas", - "numpy", -] +[project.optional-dependencies] +# dependency to use if install together with paraview +paraview = ["paraview"] [project.scripts] diff --git a/geos-pv/requirements.txt b/geos-pv/requirements.txt index 4a5b598f..e4726a3c 100644 --- a/geos-pv/requirements.txt +++ b/geos-pv/requirements.txt @@ -1,5 +1,5 @@ -"geos-geomechanics" -"geos-mesh" -"geos-posp" -"geos-prep" -"geos-utils" \ No newline at end of file +geos-geomechanics +geos-mesh +geos-posp +geos-prep +geos-utils \ No newline at end of file diff --git a/geos-pv/setup.py b/geos-pv/setup.py index 0a65b791..9c21a543 100644 --- a/geos-pv/setup.py +++ b/geos-pv/setup.py @@ -4,13 +4,17 @@ # geos python package dependencies are read from requirements.txt # WARNING: only local dependencies must be included in the requirements.txt +geos_pv_path: Path = Path( __file__ ).parent +geos_python_packages_path: Path = geos_pv_path.parent local_package_names = [] -with open( "./requirements.txt" ) as f: +with open( str(geos_pv_path / "requirements.txt") ) as f: local_package_names = f.read().splitlines() -geos_python_packages_path: Path = Path( __file__ ).parent.parent -install_requires = [ - f"{name} @ {(geos_python_packages_path / name).as_uri()}" for name in local_package_names - if ( geos_python_packages_path / name ).exists() -] -setup( install_requires=install_requires ) +install_requires = [] +for name in local_package_names: + if ( geos_python_packages_path / name ).exists(): + install_requires += [f"{name} @ {(geos_python_packages_path / 
name).as_uri()}"] + else: + install_requires += [name] + +setup( install_requires= install_requires) diff --git a/geos-pv/src/geos_pv/__init__.py b/geos-pv/src/geos/pv/__init__.py similarity index 100% rename from geos-pv/src/geos_pv/__init__.py rename to geos-pv/src/geos/pv/__init__.py diff --git a/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderAquifers.py b/geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderAquifers.py similarity index 98% rename from geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderAquifers.py rename to geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderAquifers.py index 32995f2b..f77c439e 100644 --- a/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderAquifers.py +++ b/geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderAquifers.py @@ -6,8 +6,8 @@ import pandas as pd # type: ignore[import-untyped] from typing_extensions import Self -import geos_pv.geosLogReaderUtils.geosLogReaderFunctions as fcts -from geos_utils.enumUnits import Unit +import geos.pv.geosLogReaderUtils.geosLogReaderFunctions as fcts +from geos.utils.enumUnits import Unit class GeosLogReaderAquifers: diff --git a/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderConvergence.py b/geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderConvergence.py similarity index 98% rename from geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderConvergence.py rename to geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderConvergence.py index 79fa6d51..f0457b2c 100644 --- a/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderConvergence.py +++ b/geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderConvergence.py @@ -6,8 +6,8 @@ import pandas as pd # type: ignore[import-untyped] from typing_extensions import Self -import geos_pv.geosLogReaderUtils.geosLogReaderFunctions as fcts -from geos_utils.enumUnits import Unit +import geos.pv.geosLogReaderUtils.geosLogReaderFunctions as fcts +from geos.utils.enumUnits import Unit class GeosLogReaderConvergence: diff --git 
a/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderFlow.py b/geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderFlow.py similarity index 96% rename from geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderFlow.py rename to geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderFlow.py index b84d709c..f7639d80 100644 --- a/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderFlow.py +++ b/geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderFlow.py @@ -7,8 +7,8 @@ import pandas as pd # type: ignore[import-untyped] from typing_extensions import Self -import geos_pv.geosLogReaderUtils.geosLogReaderFunctions as fcts -from geos_utils.enumUnits import Unit +import geos.pv.geosLogReaderUtils.geosLogReaderFunctions as fcts +from geos.utils.enumUnits import Unit class GeosLogReaderFlow: diff --git a/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderWells.py b/geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderWells.py similarity index 99% rename from geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderWells.py rename to geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderWells.py index eead1c95..eddf5fc3 100644 --- a/geos-pv/src/geos_pv/geosLogReaderUtils/GeosLogReaderWells.py +++ b/geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderWells.py @@ -7,8 +7,8 @@ import pandas as pd # type: ignore[import-untyped] from typing_extensions import Self -import geos_pv.geosLogReaderUtils.geosLogReaderFunctions as fcts -from geos_utils.enumUnits import Unit +import geos.pv.geosLogReaderUtils.geosLogReaderFunctions as fcts +from geos.utils.enumUnits import Unit class GeosLogReaderWells: diff --git a/geos-pv/src/geos_pv/geosLogReaderUtils/__init__.py b/geos-pv/src/geos/pv/geosLogReaderUtils/__init__.py similarity index 100% rename from geos-pv/src/geos_pv/geosLogReaderUtils/__init__.py rename to geos-pv/src/geos/pv/geosLogReaderUtils/__init__.py diff --git a/geos-pv/src/geos_pv/geosLogReaderUtils/geosLogReaderFunctions.py 
b/geos-pv/src/geos/pv/geosLogReaderUtils/geosLogReaderFunctions.py similarity index 97% rename from geos-pv/src/geos_pv/geosLogReaderUtils/geosLogReaderFunctions.py rename to geos-pv/src/geos/pv/geosLogReaderUtils/geosLogReaderFunctions.py index 36a2c531..834a6850 100644 --- a/geos-pv/src/geos_pv/geosLogReaderUtils/geosLogReaderFunctions.py +++ b/geos-pv/src/geos/pv/geosLogReaderUtils/geosLogReaderFunctions.py @@ -7,7 +7,7 @@ from copy import deepcopy from typing import Any, Union -from geos_utils.enumUnits import Unit, convert +from geos.utils.enumUnits import Unit, convert __doc__ = """Functions to read and process Geos log.""" diff --git a/geos-pv/src/geos_pv/pyplotUtils/__init__.py b/geos-pv/src/geos/pv/pyplotUtils/__init__.py similarity index 100% rename from geos-pv/src/geos_pv/pyplotUtils/__init__.py rename to geos-pv/src/geos/pv/pyplotUtils/__init__.py diff --git a/geos-pv/src/geos_pv/pyplotUtils/matplotlibOptions.py b/geos-pv/src/geos/pv/pyplotUtils/matplotlibOptions.py similarity index 100% rename from geos-pv/src/geos_pv/pyplotUtils/matplotlibOptions.py rename to geos-pv/src/geos/pv/pyplotUtils/matplotlibOptions.py diff --git a/geos-pv/src/geos_pv/utils/DisplayOrganizationParaview.py b/geos-pv/src/geos/pv/utils/DisplayOrganizationParaview.py similarity index 100% rename from geos-pv/src/geos_pv/utils/DisplayOrganizationParaview.py rename to geos-pv/src/geos/pv/utils/DisplayOrganizationParaview.py diff --git a/geos-pv/src/geos_pv/utils/__init__.py b/geos-pv/src/geos/pv/utils/__init__.py similarity index 100% rename from geos-pv/src/geos_pv/utils/__init__.py rename to geos-pv/src/geos/pv/utils/__init__.py diff --git a/geos-pv/src/geos_pv/utils/checkboxFunction.py b/geos-pv/src/geos/pv/utils/checkboxFunction.py similarity index 100% rename from geos-pv/src/geos_pv/utils/checkboxFunction.py rename to geos-pv/src/geos/pv/utils/checkboxFunction.py diff --git a/geos-pv/src/geos_pv/utils/paraviewTreatments.py b/geos-pv/src/geos/pv/utils/paraviewTreatments.py 
similarity index 100% rename from geos-pv/src/geos_pv/utils/paraviewTreatments.py rename to geos-pv/src/geos/pv/utils/paraviewTreatments.py diff --git a/geos-pv/src/geos_pv/py.typed b/geos-pv/src/geos_pv/py.typed deleted file mode 100644 index e69de29b..00000000 From 18123f0cbf03c8db4c861b48623887421431fd03 Mon Sep 17 00:00:00 2001 From: mlemayTTE Date: Thu, 3 Apr 2025 16:32:39 +0200 Subject: [PATCH 07/20] Manage PV plugins dependency --- README.md | 15 +++++++++++++- geos-pv/src/PVplugins/PVGeosLogReader.py | 26 ++++++++++++------------ geos-pv/src/PVplugins/__init__.py | 15 -------------- geos-pv/src/geos/pv/utils/config.py | 19 +++++++++++++++++ 4 files changed, 46 insertions(+), 29 deletions(-) delete mode 100644 geos-pv/src/PVplugins/__init__.py create mode 100644 geos-pv/src/geos/pv/utils/config.py diff --git a/README.md b/README.md index 9d45d82f..0afcadaf 100644 --- a/README.md +++ b/README.md @@ -142,7 +142,7 @@ If you would like to contribute to GEOS Python packages, please respect the foll 1. Create a new branch named from this template: `[CONTRIBUTOR]/[TYPE]/[TITLE]` where CONTRIBUTOR is the name of the contributor, TYPE is the type of contribution among 'feature', 'refactor', 'doc', 'ci', TITLE is a short title for the branch. 1. Add your code trying to integrate into the current code architecture. -1. Push the branch, open a new PR, and add reviewers +1. Push the branch, open a new PR respecting naming [semantics](https://gist.github.com/joshbuchea/6f47e86d2510bce28f8e7f42ae84c716), and add reviewers If you do not have the rights to push the code and open new PRs, consider opening a new issue to explain what you want to do and ask for the dev rights. @@ -186,6 +186,19 @@ The *setup.py* file is optional. It is required if the package depends on anothe ) ``` +If you want to add new Paraview plugins, create the plugins in the `geos-pv/src/PVplugins` directory. 
+If the plugin depends on another GEOS python package, add the package name to `geos-pv/requirements.txt` and the following lines to the top of your plugin: + +``` +import sys +from pathlib import Path +# update sys.path to load all GEOS Python Package dependencies +geos_pv_path: Path = Path( __file__ ).parent.parent.parent +sys.path.insert( 0, str(geos_pv_path / "src") ) +from geos.pv.utils.config import update_paths +update_paths() +``` + Release ------- diff --git a/geos-pv/src/PVplugins/PVGeosLogReader.py b/geos-pv/src/PVplugins/PVGeosLogReader.py index 09659d71..41a8febb 100644 --- a/geos-pv/src/PVplugins/PVGeosLogReader.py +++ b/geos-pv/src/PVplugins/PVGeosLogReader.py @@ -4,6 +4,7 @@ # ruff: noqa: E402 # disable Module level import not at top of file import os import sys +from pathlib import Path from enum import Enum from typing import Union, cast @@ -12,12 +13,11 @@ import pandas as pd # type: ignore[import-untyped] from typing_extensions import Self -dir_path = os.path.dirname( os.path.realpath( __file__ ) ) -parent_dir_path = os.path.dirname( dir_path ) -if parent_dir_path not in sys.path: - sys.path.append( parent_dir_path ) - -import PVplugins #required to update sys path +# update sys.path to load all GEOS Python Package dependencies +geos_pv_path: Path = Path( __file__ ).parent.parent.parent +sys.path.insert( 0, str(geos_pv_path / "src") ) +from geos.pv.utils.config import update_paths +update_paths() import vtkmodules.util.numpy_support as vnp from paraview.util.vtkAlgorithm import ( # type: ignore[import-not-found] @@ -32,15 +32,15 @@ ) from vtkmodules.vtkCommonDataModel import vtkTable -from geos_pv.geosLogReaderUtils.geosLogReaderFunctions import ( +from geos.pv.geosLogReaderUtils.geosLogReaderFunctions import ( identifyProperties, transformUserChoiceToListPhases, ) -from geos_pv.geosLogReaderUtils.GeosLogReaderAquifers import GeosLogReaderAquifers -from geos_pv.geosLogReaderUtils.GeosLogReaderConvergence import GeosLogReaderConvergence -from 
geos_pv.geosLogReaderUtils.GeosLogReaderFlow import GeosLogReaderFlow -from geos_pv.geosLogReaderUtils.GeosLogReaderWells import GeosLogReaderWells +from geos.pv.geosLogReaderUtils.GeosLogReaderAquifers import GeosLogReaderAquifers +from geos.pv.geosLogReaderUtils.GeosLogReaderConvergence import GeosLogReaderConvergence +from geos.pv.geosLogReaderUtils.GeosLogReaderFlow import GeosLogReaderFlow +from geos.pv.geosLogReaderUtils.GeosLogReaderWells import GeosLogReaderWells from geos.utils.enumUnits import ( Mass, MassRate, @@ -53,9 +53,9 @@ ) from geos.utils.UnitRepository import UnitRepository -from geos_pv.utils.checkboxFunction import ( # type: ignore[attr-defined] +from geos.pv.utils.checkboxFunction import ( # type: ignore[attr-defined] createModifiedCallback, ) -from geos_pv.utils.paraviewTreatments import ( +from geos.pv.utils.paraviewTreatments import ( strListToEnumerationDomainXml, ) __doc__ = """ diff --git a/geos-pv/src/PVplugins/__init__.py b/geos-pv/src/PVplugins/__init__.py deleted file mode 100644 index c646673a..00000000 --- a/geos-pv/src/PVplugins/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -import os -import sys - -# Add other packages path to sys path -dir_path = os.path.dirname( os.path.realpath( __file__ ) ) -python_root = '../../..' - -python_modules = [ "geos-pv" ] -with open( "./requirements.txt" ) as f: - python_modules += f.read().splitlines() - -for m in python_modules: - m_path = os.path.abspath( os.path.join( dir_path, python_root, m, 'src' ) ) - if m_path not in sys.path: - sys.path.insert( 0, m_path ) diff --git a/geos-pv/src/geos/pv/utils/config.py b/geos-pv/src/geos/pv/utils/config.py new file mode 100644 index 00000000..0abc9804 --- /dev/null +++ b/geos-pv/src/geos/pv/utils/config.py @@ -0,0 +1,19 @@ +import sys +from pathlib import Path + +def update_paths() ->None: + """Update sys path to load GEOS Python packages. 
""" + # Add other packages path to sys path + geos_pv_path: Path = Path( __file__ ).parent.parent.parent.parent.parent + geos_python_packages_path: Path = geos_pv_path.parent + + python_modules = [ "geos-pv" ] + with open( str(geos_pv_path / "requirements.txt") ) as f: + python_modules += f.read().splitlines() + + for m in python_modules: + if not ( geos_python_packages_path / m ).exists(): + continue + m_path = str( geos_python_packages_path / m / "src") + if m_path not in sys.path: + sys.path.insert( 0, m_path ) \ No newline at end of file From 9ee93a68ede4710d7ca7c3b6c7c5d07cf10a4ab7 Mon Sep 17 00:00:00 2001 From: mlemayTTE Date: Thu, 3 Apr 2025 16:37:38 +0200 Subject: [PATCH 08/20] typing and linting fix --- geos-pv/setup.py | 10 +++++----- geos-pv/src/PVplugins/PVGeosLogReader.py | 8 +++++--- geos-pv/src/geos/pv/utils/config.py | 11 ++++++----- 3 files changed, 16 insertions(+), 13 deletions(-) diff --git a/geos-pv/setup.py b/geos-pv/setup.py index 9c21a543..b9ebf3b8 100644 --- a/geos-pv/setup.py +++ b/geos-pv/setup.py @@ -7,14 +7,14 @@ geos_pv_path: Path = Path( __file__ ).parent geos_python_packages_path: Path = geos_pv_path.parent local_package_names = [] -with open( str(geos_pv_path / "requirements.txt") ) as f: +with open( str( geos_pv_path / "requirements.txt" ) ) as f: local_package_names = f.read().splitlines() install_requires = [] for name in local_package_names: if ( geos_python_packages_path / name ).exists(): - install_requires += [f"{name} @ {(geos_python_packages_path / name).as_uri()}"] + install_requires += [ f"{name} @ {(geos_python_packages_path / name).as_uri()}" ] else: - install_requires += [name] - -setup( install_requires= install_requires) + install_requires += [ name ] + +setup( install_requires=install_requires ) diff --git a/geos-pv/src/PVplugins/PVGeosLogReader.py b/geos-pv/src/PVplugins/PVGeosLogReader.py index 41a8febb..e36d4afb 100644 --- a/geos-pv/src/PVplugins/PVGeosLogReader.py +++ b/geos-pv/src/PVplugins/PVGeosLogReader.py 
@@ -15,8 +15,9 @@ # update sys.path to load all GEOS Python Package dependencies geos_pv_path: Path = Path( __file__ ).parent.parent.parent -sys.path.insert( 0, str(geos_pv_path / "src") ) +sys.path.insert( 0, str( geos_pv_path / "src" ) ) from geos.pv.utils.config import update_paths + update_paths() import vtkmodules.util.numpy_support as vnp @@ -146,8 +147,9 @@ def __init__( self: Self ) -> None: self.m_propertiesWells.AddArray( prop ) self.m_propertiesAquifers: vtkDAS = vtkDAS() - self.m_propertiesAquifers.AddObserver( "ModifiedEvent", - createModifiedCallback( self ) ) # type: ignore[arg-type] + self.m_propertiesAquifers.AddObserver( + "ModifiedEvent", # type: ignore[arg-type] + createModifiedCallback( self ) ) propsAquifers: list[ str ] = [ "Volume", "VolumetricRate", diff --git a/geos-pv/src/geos/pv/utils/config.py b/geos-pv/src/geos/pv/utils/config.py index 0abc9804..08e95e57 100644 --- a/geos-pv/src/geos/pv/utils/config.py +++ b/geos-pv/src/geos/pv/utils/config.py @@ -1,19 +1,20 @@ import sys from pathlib import Path -def update_paths() ->None: + +def update_paths() -> None: """Update sys path to load GEOS Python packages. 
""" # Add other packages path to sys path geos_pv_path: Path = Path( __file__ ).parent.parent.parent.parent.parent geos_python_packages_path: Path = geos_pv_path.parent - python_modules = [ "geos-pv" ] - with open( str(geos_pv_path / "requirements.txt") ) as f: + python_modules: list[ str ] = [ "geos-pv" ] + with open( str( geos_pv_path / "requirements.txt" ) ) as f: python_modules += f.read().splitlines() for m in python_modules: if not ( geos_python_packages_path / m ).exists(): continue - m_path = str( geos_python_packages_path / m / "src") + m_path: str = str( geos_python_packages_path / m / "src" ) if m_path not in sys.path: - sys.path.insert( 0, m_path ) \ No newline at end of file + sys.path.insert( 0, m_path ) From 0e542e1d872beb899d13f8d4733078ce109211d2 Mon Sep 17 00:00:00 2001 From: mlemayTTE Date: Mon, 7 Apr 2025 13:15:26 +0200 Subject: [PATCH 09/20] add .vscode to .gitignore --- .gitignore | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index a47b528f..5f08477d 100644 --- a/.gitignore +++ b/.gitignore @@ -162,4 +162,7 @@ cython_debug/ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore # and can be added to the global gitignore or merged into this file. For a more nuclear # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
-#.idea/ \ No newline at end of file +#.idea/ + +# VSCode +.vscode \ No newline at end of file From e39555fc973e34bb8d8442c44a64dabbc0091e48 Mon Sep 17 00:00:00 2001 From: mlemayTTE Date: Mon, 7 Apr 2025 13:23:53 +0200 Subject: [PATCH 10/20] linting and formatting --- geos-posp/src/PVplugins/PVAttributeMapping.py | 2 - .../PVCreateConstantAttributePerRegion.py | 2 - .../PVplugins/PVExtractMergeBlocksVolume.py | 4 +- .../PVExtractMergeBlocksVolumeSurface.py | 2 - .../PVExtractMergeBlocksVolumeSurfaceWell.py | 3 - .../PVExtractMergeBlocksVolumeWell.py | 3 - .../src/PVplugins/PVGeomechanicsAnalysis.py | 3 - .../PVplugins/PVGeomechanicsWorkflowVolume.py | 2 - .../PVGeomechanicsWorkflowVolumeSurface.py | 2 - ...PVGeomechanicsWorkflowVolumeSurfaceWell.py | 2 - .../PVGeomechanicsWorkflowVolumeWell.py | 5 -- .../src/PVplugins/PVMergeBlocksEnhanced.py | 2 - .../src/PVplugins/PVSurfaceGeomechanics.py | 2 - .../PVTransferAttributesVolumeSurface.py | 3 - .../filters/AttributeMappingFromCellId.py | 4 -- .../filters/GeomechanicsCalculator.py | 9 --- .../geos_posp/filters/SurfaceGeomechanics.py | 13 ----- .../filters/VolumeSurfaceMeshMapper.py | 13 ----- .../visu/PVUtils/paraviewTreatments.py | 4 +- .../tests/testsFunctionsGeosLogReader.py | 2 +- geos-pv/src/PVplugins/PVGeosLogReader.py | 58 ------------------- .../geosLogReaderUtils/GeosLogReaderFlow.py | 1 - geos-pv/src/geos/pv/utils/config.py | 2 +- .../src/geos/pv/utils/paraviewTreatments.py | 6 +- geos-xml-viewer/src/PVplugins/deckReader.py | 2 +- 25 files changed, 7 insertions(+), 144 deletions(-) diff --git a/geos-posp/src/PVplugins/PVAttributeMapping.py b/geos-posp/src/PVplugins/PVAttributeMapping.py index 7d76feda..8a304708 100644 --- a/geos-posp/src/PVplugins/PVAttributeMapping.py +++ b/geos-posp/src/PVplugins/PVAttributeMapping.py @@ -13,8 +13,6 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -import PVplugins #required to update sys path - from geos.utils.Logger import Logger, getLogger from 
geos_posp.filters.AttributeMappingFromCellCoords import ( AttributeMappingFromCellCoords, ) diff --git a/geos-posp/src/PVplugins/PVCreateConstantAttributePerRegion.py b/geos-posp/src/PVplugins/PVCreateConstantAttributePerRegion.py index 8cd81a06..e49c61e9 100644 --- a/geos-posp/src/PVplugins/PVCreateConstantAttributePerRegion.py +++ b/geos-posp/src/PVplugins/PVCreateConstantAttributePerRegion.py @@ -15,8 +15,6 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -import PVplugins #required to update sys path - import vtkmodules.util.numpy_support as vnp from geos.utils.Logger import Logger, getLogger from geos_posp.processing.multiblockInpectorTreeFunctions import ( diff --git a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolume.py b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolume.py index c3ebf0f5..50d1d4a8 100644 --- a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolume.py +++ b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolume.py @@ -2,6 +2,8 @@ # SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. 
# SPDX-FileContributor: Martin Lemay # ruff: noqa: E402 # disable Module level import not at top of file +import os +import sys import numpy as np import numpy.typing as npt from typing_extensions import Self @@ -13,8 +15,6 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -import PVplugins #required to update sys path - from geos.utils.GeosOutputsConstants import ( GeosMeshOutputsEnum, getAttributeToTransferFromInitialTime, diff --git a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurface.py b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurface.py index e361e31e..22477abf 100644 --- a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurface.py +++ b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurface.py @@ -16,8 +16,6 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -import PVplugins #required to update sys path - from geos.utils.GeosOutputsConstants import ( GeosMeshOutputsEnum, getAttributeToTransferFromInitialTime, diff --git a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurfaceWell.py b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurfaceWell.py index 71d531fa..7aaabc5a 100644 --- a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurfaceWell.py +++ b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurfaceWell.py @@ -16,9 +16,6 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -import PVplugins #required to update sys path - - from geos.utils.GeosOutputsConstants import ( GeosMeshOutputsEnum, getAttributeToTransferFromInitialTime, diff --git a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeWell.py b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeWell.py index de744730..43882eac 100644 --- a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeWell.py +++ b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeWell.py @@ -19,9 +19,6 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -import PVplugins #required to update sys path - - 
from geos.utils.GeosOutputsConstants import ( GeosMeshOutputsEnum, getAttributeToTransferFromInitialTime, diff --git a/geos-posp/src/PVplugins/PVGeomechanicsAnalysis.py b/geos-posp/src/PVplugins/PVGeomechanicsAnalysis.py index 54c84d62..ad78eb0e 100644 --- a/geos-posp/src/PVplugins/PVGeomechanicsAnalysis.py +++ b/geos-posp/src/PVplugins/PVGeomechanicsAnalysis.py @@ -19,9 +19,6 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -import PVplugins #required to update sys path - - from geos.utils.Logger import Logger, getLogger from geos.utils.PhysicalConstants import ( DEFAULT_FRICTION_ANGLE_DEG, diff --git a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolume.py b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolume.py index 1e92e952..2807152c 100644 --- a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolume.py +++ b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolume.py @@ -16,8 +16,6 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -import PVplugins #required to update sys path - from geos.utils.Logger import Logger, getLogger from geos.utils.PhysicalConstants import ( DEFAULT_FRICTION_ANGLE_DEG, diff --git a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurface.py b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurface.py index 2ee62e5f..b92a136e 100644 --- a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurface.py +++ b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurface.py @@ -16,8 +16,6 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -import PVplugins #required to update sys path - from paraview.util.vtkAlgorithm import ( # type: ignore[import-not-found] VTKPythonAlgorithmBase, smdomain, smhint, smproperty, smproxy, ) diff --git a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurfaceWell.py b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurfaceWell.py index 93e45346..03cdc1d7 100644 --- 
a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurfaceWell.py +++ b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurfaceWell.py @@ -16,8 +16,6 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -import PVplugins #required to update sys path - from paraview.util.vtkAlgorithm import ( # type: ignore[import-not-found] VTKPythonAlgorithmBase, smdomain, smhint, smproperty, smproxy, ) diff --git a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeWell.py b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeWell.py index 9a73acb3..93c0c416 100644 --- a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeWell.py +++ b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeWell.py @@ -15,8 +15,6 @@ parent_dir_path = os.path.dirname( dir_path ) if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) - -import PVplugins #required to update sys path from paraview.util.vtkAlgorithm import ( # type: ignore[import-not-found] VTKPythonAlgorithmBase, smdomain, smhint, smproperty, smproxy, @@ -30,9 +28,6 @@ DEFAULT_ROCK_COHESION, WATER_DENSITY, ) -from paraview.util.vtkAlgorithm import ( # type: ignore[import-not-found] - VTKPythonAlgorithmBase, smdomain, smhint, smproperty, smproxy, -) from PVplugins.PVExtractMergeBlocksVolumeWell import ( PVExtractMergeBlocksVolumeWell, ) diff --git a/geos-posp/src/PVplugins/PVMergeBlocksEnhanced.py b/geos-posp/src/PVplugins/PVMergeBlocksEnhanced.py index fc7236c7..bbbd9696 100644 --- a/geos-posp/src/PVplugins/PVMergeBlocksEnhanced.py +++ b/geos-posp/src/PVplugins/PVMergeBlocksEnhanced.py @@ -13,8 +13,6 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -import PVplugins #required to update sys path - from geos.utils.Logger import Logger, getLogger from geos_posp.processing.vtkUtils import mergeBlocks from paraview.util.vtkAlgorithm import ( # type: ignore[import-not-found] diff --git a/geos-posp/src/PVplugins/PVSurfaceGeomechanics.py 
b/geos-posp/src/PVplugins/PVSurfaceGeomechanics.py index e276ae89..85aa088f 100644 --- a/geos-posp/src/PVplugins/PVSurfaceGeomechanics.py +++ b/geos-posp/src/PVplugins/PVSurfaceGeomechanics.py @@ -13,8 +13,6 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -import PVplugins #required to update sys path - from geos.utils.Logger import Logger, getLogger from geos.utils.PhysicalConstants import ( DEFAULT_FRICTION_ANGLE_DEG, diff --git a/geos-posp/src/PVplugins/PVTransferAttributesVolumeSurface.py b/geos-posp/src/PVplugins/PVTransferAttributesVolumeSurface.py index aeb0d083..1783ddbd 100644 --- a/geos-posp/src/PVplugins/PVTransferAttributesVolumeSurface.py +++ b/geos-posp/src/PVplugins/PVTransferAttributesVolumeSurface.py @@ -12,8 +12,6 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -import PVplugins #required to update sys path - from geos.utils.Logger import Logger, getLogger from geos_posp.filters.TransferAttributesVolumeSurface import ( TransferAttributesVolumeSurface, ) @@ -45,7 +43,6 @@ vtkUnstructuredGrid, ) - __doc__ = """ PVTransferAttributesVolumeSurface is a Paraview plugin that allows to map face ids from surface mesh with cell ids from volume mesh. 
diff --git a/geos-posp/src/geos_posp/filters/AttributeMappingFromCellId.py b/geos-posp/src/geos_posp/filters/AttributeMappingFromCellId.py index 2c6695f8..aa205f16 100644 --- a/geos-posp/src/geos_posp/filters/AttributeMappingFromCellId.py +++ b/geos-posp/src/geos_posp/filters/AttributeMappingFromCellId.py @@ -102,11 +102,7 @@ def RequestDataObject( if outData is None or ( not outData.IsA( inData.GetClassName() ) ): outData = inData.NewInstance() outInfoVec.GetInformationObject( 0 ).Set( outData.DATA_OBJECT(), outData ) -<<<<<<< HEAD return super().RequestDataObject( request, inInfoVec, outInfoVec ) # type: ignore[no-any-return] -======= - return super().RequestDataObject( request, inInfoVec, outInfoVec ) # type: ignore ->>>>>>> main def RequestData( self: Self, diff --git a/geos-posp/src/geos_posp/filters/GeomechanicsCalculator.py b/geos-posp/src/geos_posp/filters/GeomechanicsCalculator.py index 05a8d881..cdd44a34 100644 --- a/geos-posp/src/geos_posp/filters/GeomechanicsCalculator.py +++ b/geos-posp/src/geos_posp/filters/GeomechanicsCalculator.py @@ -961,17 +961,8 @@ def computeLitostaticStress( self: Self ) -> bool: density: npt.NDArray[ np.float64 ] = getArrayInObject( self.m_output, densityAttributeName, self.m_attributeOnPoints ) try: -<<<<<<< HEAD - depth: npt.NDArray[ np.float64 ] - if self.m_attributeOnPoints: - depth = self.computeDepthAlongLine() - else: - depth = self.computeDepthInMesh() - -======= depth: npt.NDArray[ np.float64 ] = self.computeDepthAlongLine( ) if self.m_attributeOnPoints else self.computeDepthInMesh() ->>>>>>> main assert depth is not None, "Depth is undefined." 
assert density is not None, ( f"{densityAttributeName}" + UNDEFINED_ATTRIBUTE_MESSAGE ) diff --git a/geos-posp/src/geos_posp/filters/SurfaceGeomechanics.py b/geos-posp/src/geos_posp/filters/SurfaceGeomechanics.py index 81086cde..849b14e4 100644 --- a/geos-posp/src/geos_posp/filters/SurfaceGeomechanics.py +++ b/geos-posp/src/geos_posp/filters/SurfaceGeomechanics.py @@ -32,11 +32,6 @@ ) from vtkmodules.vtkCommonDataModel import ( vtkPolyData, ) -<<<<<<< HEAD - -import geos.geomechanics.processing.geomechanicsCalculatorFunctions as fcts -======= ->>>>>>> main from geos_posp.processing.vtkUtils import ( createAttribute, @@ -407,11 +402,7 @@ def computeChangeOfBasisMatrix( self: Self, localBasis: npt.NDArray[ np.float64 if fromLocalToYXZ: return P # inverse the change of basis matrix -<<<<<<< HEAD - return np.linalg.inv( P ) -======= return np.linalg.inv( P ).astype( np.float64 ) ->>>>>>> main def getNormalTangentsVectors( self: Self ) -> npt.NDArray[ np.float64 ]: """Compute the change of basis matrix from Local to XYZ bases. @@ -428,11 +419,7 @@ def getNormalTangentsVectors( self: Self ) -> npt.NDArray[ np.float64 ]: assert tangents1 is not None, "Tangents attribute was not found." # compute second tangential component -<<<<<<< HEAD - tangents2: npt.NDArray[ np.float64 ] = np.cross( normals, tangents1, axis=1 ) -======= tangents2: npt.NDArray[ np.float64 ] = np.cross( normals, tangents1, axis=1 ).astype( np.float64 ) ->>>>>>> main assert tangents2 is not None, "Local basis third axis was not computed." 
# put vectors as columns diff --git a/geos-posp/src/geos_posp/filters/VolumeSurfaceMeshMapper.py b/geos-posp/src/geos_posp/filters/VolumeSurfaceMeshMapper.py index e7c66194..c4508bcc 100644 --- a/geos-posp/src/geos_posp/filters/VolumeSurfaceMeshMapper.py +++ b/geos-posp/src/geos_posp/filters/VolumeSurfaceMeshMapper.py @@ -28,19 +28,6 @@ ) from vtkmodules.vtkCommonDataModel import vtkPolyData, vtkUnstructuredGrid -<<<<<<< HEAD -from geos.utils.ConnectionSet import ( - ConnectionSet, - ConnectionSetCollection, -) -from geos.utils.geometryFunctions import getCellSideAgainstPlane -from geos.utils.GeosOutputsConstants import PostProcessingOutputsEnum -from geos.utils.Logger import Logger, getLogger -from geos.utils.PhysicalConstants import ( - EPSILON, ) - -======= ->>>>>>> main __doc__ = """ VolumeSurfaceMeshMapper is a vtk filter that collects the cell of a volume mesh adjacents to the faces of a surface mesh. diff --git a/geos-posp/src/geos_posp/visu/PVUtils/paraviewTreatments.py b/geos-posp/src/geos_posp/visu/PVUtils/paraviewTreatments.py index 1931a57d..ca36a4b9 100644 --- a/geos-posp/src/geos_posp/visu/PVUtils/paraviewTreatments.py +++ b/geos-posp/src/geos_posp/visu/PVUtils/paraviewTreatments.py @@ -17,7 +17,6 @@ from paraview.simple import ( # type: ignore[import-not-found] FindSource, GetActiveView, GetAnimationScene, GetDisplayProperties, GetSources, servermanager, ) -import vtkmodules.util.numpy_support as vnp from vtkmodules.vtkCommonCore import ( vtkDataArray, vtkDataArraySelection, @@ -25,7 +24,6 @@ vtkPoints, ) from vtkmodules.vtkCommonDataModel import ( - vtkCellData, vtkCompositeDataSet, vtkDataObject, vtkMultiBlockDataSet, @@ -470,7 +468,7 @@ def getVtkOriginalCellIds( mesh: Union[ vtkMultiBlockDataSet, vtkCompositeDataSe list[str]: ids of the cells. 
""" # merge blocks for vtkCompositeDataSet - mesh2: vtkUnstructuredGrid = mergeFilterPV( mesh, True ) + mesh2: vtkUnstructuredGrid = mergeFilterPV( mesh ) name: str = GeosMeshOutputsEnum.VTK_ORIGINAL_CELL_ID.attributeName assert isAttributeInObject( mesh2, name, False ), f"Attribute {name} is not in the mesh." return [ str( int( ide ) ) for ide in getArrayInObject( mesh2, name, False ) ] diff --git a/geos-posp/tests/testsFunctionsGeosLogReader.py b/geos-posp/tests/testsFunctionsGeosLogReader.py index d68b82a9..3ca47eaf 100644 --- a/geos-posp/tests/testsFunctionsGeosLogReader.py +++ b/geos-posp/tests/testsFunctionsGeosLogReader.py @@ -16,7 +16,7 @@ sys.path.append( parent_dir_path ) from geos.utils.enumUnits import Unit, getSIUnits -from geos_posp.processing import geosLogReaderFunctions as utils +from geos.pv.geosLogReaderUtils import geosLogReaderFunctions as utils class TestsFunctionsGeosLogReader( unittest.TestCase ): diff --git a/geos-pv/src/PVplugins/PVGeosLogReader.py b/geos-pv/src/PVplugins/PVGeosLogReader.py index 0b2c3649..f5b502cc 100644 --- a/geos-pv/src/PVplugins/PVGeosLogReader.py +++ b/geos-pv/src/PVplugins/PVGeosLogReader.py @@ -82,11 +82,7 @@ name="PVGeosLogReader", label="Geos Log Reader", extensions=[ "txt", "out" ], -<<<<<<< HEAD:geos-pv/src/PVplugins/PVGeosLogReader.py - file_description="GEOS log .txt or .out files", -======= file_description="txt and out files of GEOS log files", ->>>>>>> main:geos-posp/src/PVplugins/PVGeosLogReader.py ) class PVGeosLogReader( VTKPythonAlgorithmBase ): @@ -198,13 +194,8 @@ def getFilepath( self: Self ) -> str: @smproperty.stringvector( name="EnterPhaseNames", label="Enter Phase Names", default_values="" ) @smdomain.xml( """ -<<<<<<< HEAD:geos-pv/src/PVplugins/PVGeosLogReader.py - Please enter the phase names as in the form: phase0, phase1, phase2 - """ ) -======= Please enter your phase names as phase0, phase1, phase2. 
""" ) ->>>>>>> main:geos-posp/src/PVplugins/PVGeosLogReader.py def a02SetPhaseNames( self: Self, value: str ) -> None: """Set phase names. @@ -250,17 +241,10 @@ def getDataframeChoice( self: Self ) -> int: return self.m_dataframeChoice @smproperty.xml( """ -<<<<<<< HEAD:geos-pv/src/PVplugins/PVGeosLogReader.py - - - - """ ) -======= """ ) ->>>>>>> main:geos-posp/src/PVplugins/PVGeosLogReader.py def a04PropertyGroup( self: Self ) -> None: """Organized group.""" self.Modified() @@ -271,17 +255,10 @@ def a05SetPropertiesFlow( self: Self ) -> vtkDAS: return self.m_propertiesFlow @smproperty.xml( """ -<<<<<<< HEAD:geos-pv/src/PVplugins/PVGeosLogReader.py - - - """ ) -======= """ ) ->>>>>>> main:geos-posp/src/PVplugins/PVGeosLogReader.py def a06GroupFlow( self: Self ) -> None: """Organized group.""" self.Modified() @@ -310,19 +287,11 @@ def getNumberOfWellsMean( self: Self ) -> int: return self.m_numberWellsMean @smproperty.xml( """ -<<<<<<< HEAD:geos-pv/src/PVplugins/PVGeosLogReader.py - - - - """ ) -======= """ ) ->>>>>>> main:geos-posp/src/PVplugins/PVGeosLogReader.py def a09GroupWells( self: Self ) -> None: """Organized group.""" self.Modified() @@ -333,17 +302,10 @@ def a10SetPropertiesAquifers( self: Self ) -> vtkDAS: return self.m_propertiesAquifers @smproperty.xml( """ -<<<<<<< HEAD:geos-pv/src/PVplugins/PVGeosLogReader.py - - - """ ) -======= """ ) ->>>>>>> main:geos-posp/src/PVplugins/PVGeosLogReader.py def a11GroupAquifers( self: Self ) -> None: """Organized group.""" self.Modified() @@ -354,17 +316,10 @@ def a12SetConvergence( self: Self ) -> vtkDAS: return self.m_convergence @smproperty.xml( """ -<<<<<<< HEAD:geos-pv/src/PVplugins/PVGeosLogReader.py - - - """ ) -======= """ ) ->>>>>>> main:geos-posp/src/PVplugins/PVGeosLogReader.py def a13GroupSolvers( self: Self ) -> None: """Organized group.""" self.Modified() @@ -493,18 +448,6 @@ def b08SetMassRateUnit( self: Self, value: int ) -> None: self.Modified() @smproperty.xml( """ -<<<<<<< 
HEAD:geos-pv/src/PVplugins/PVGeosLogReader.py - - - - - - - - - """ ) -======= @@ -515,7 +458,6 @@ def b08SetMassRateUnit( self: Self, value: int ) -> None: """ ) ->>>>>>> main:geos-posp/src/PVplugins/PVGeosLogReader.py def b09GroupUnitsToUse( self: Self ) -> None: """Organize group.""" self.Modified() diff --git a/geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderFlow.py b/geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderFlow.py index 643e1002..fae8625a 100644 --- a/geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderFlow.py +++ b/geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderFlow.py @@ -9,7 +9,6 @@ from typing_extensions import Self import geos.pv.geosLogReaderUtils.geosLogReaderFunctions as fcts -from geos.utils.enumUnits import Unit class GeosLogReaderFlow: diff --git a/geos-pv/src/geos/pv/utils/config.py b/geos-pv/src/geos/pv/utils/config.py index 08e95e57..96fae896 100644 --- a/geos-pv/src/geos/pv/utils/config.py +++ b/geos-pv/src/geos/pv/utils/config.py @@ -3,7 +3,7 @@ def update_paths() -> None: - """Update sys path to load GEOS Python packages. 
""" + """Update sys path to load GEOS Python packages.""" # Add other packages path to sys path geos_pv_path: Path = Path( __file__ ).parent.parent.parent.parent.parent geos_python_packages_path: Path = geos_pv_path.parent diff --git a/geos-pv/src/geos/pv/utils/paraviewTreatments.py b/geos-pv/src/geos/pv/utils/paraviewTreatments.py index 7aca71c3..42258ef7 100644 --- a/geos-pv/src/geos/pv/utils/paraviewTreatments.py +++ b/geos-pv/src/geos/pv/utils/paraviewTreatments.py @@ -30,10 +30,6 @@ vtkUnstructuredGrid, ) -from geos_posp.processing.vtkUtils import ( - getArrayInObject, - isAttributeInObject, -) from geos.utils.GeosOutputsConstants import ( ComponentNameEnum, GeosMeshOutputsEnum, @@ -540,7 +536,7 @@ def dataframeForEachTimestep( sourceName: str ) -> dict[ str, pd.DataFrame ]: animationScene.GoToFirst() source = FindSource( sourceName ) dataset: vtkDataObject = servermanager.Fetch( source ) - assert dataset is not None, f"Dataset is undefined." + assert dataset is not None, "Dataset is undefined." dataset2: vtkUnstructuredGrid = mergeFilterPV( dataset ) time: str = str( animationScene.TimeKeeper.Time ) dfPerTimestep: dict[ str, pd.DataFrame ] = { time: vtkToDataframe( dataset2 ) } diff --git a/geos-xml-viewer/src/PVplugins/deckReader.py b/geos-xml-viewer/src/PVplugins/deckReader.py index 53f72bf1..23963457 100644 --- a/geos-xml-viewer/src/PVplugins/deckReader.py +++ b/geos-xml-viewer/src/PVplugins/deckReader.py @@ -1,7 +1,7 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. 
# SPDX-FileContributor: Lionel Untereiner -from typing import Self +from typing_extensions import Self from paraview.util.vtkAlgorithm import smdomain, smhint, smproperty, smproxy from vtkmodules.util.vtkAlgorithm import VTKPythonAlgorithmBase From a4f97e15eac24541269f33c5701662a8734348d2 Mon Sep 17 00:00:00 2001 From: mlemayTTE Date: Mon, 7 Apr 2025 14:04:14 +0200 Subject: [PATCH 11/20] bug fix and setup geos-pv tests --- docs/geos_posp_docs/processing.rst | 33 -- .../src/geos/geomechanics/model/MohrCircle.py | 2 +- .../geomechanicsCalculatorFunctions.py | 2 +- geos-posp/src/PVplugins/PVAttributeMapping.py | 2 + .../PVCreateConstantAttributePerRegion.py | 2 + .../PVplugins/PVExtractMergeBlocksVolume.py | 2 + .../PVExtractMergeBlocksVolumeSurface.py | 2 + .../PVExtractMergeBlocksVolumeSurfaceWell.py | 2 + .../PVExtractMergeBlocksVolumeWell.py | 2 + .../src/PVplugins/PVGeomechanicsAnalysis.py | 2 + .../PVplugins/PVGeomechanicsWorkflowVolume.py | 2 + .../PVGeomechanicsWorkflowVolumeSurface.py | 2 + ...PVGeomechanicsWorkflowVolumeSurfaceWell.py | 2 + .../PVGeomechanicsWorkflowVolumeWell.py | 2 + .../src/PVplugins/PVMergeBlocksEnhanced.py | 2 + geos-posp/src/PVplugins/PVMohrCirclePlot.py | 2 + .../src/PVplugins/PVSurfaceGeomechanics.py | 2 + .../PVTransferAttributesVolumeSurface.py | 2 + .../geos_posp/visu/mohrCircles/__init__.py | 3 +- geos-posp/tests/mainTests.py | 26 -- .../tests/testsFunctionsGeosLogReader.py | 432 ------------------ .../tests/testsGeosLogReaderConvergence.py | 70 --- geos-posp/tests/testsGeosLogReaderWells.py | 228 --------- geos-posp/tests/testsInvalidLogs.py | 59 --- geos-pv/pyproject.toml | 12 +- .../tests/testsFunctionsFigure2DGenerator.py | 184 -------- 26 files changed, 43 insertions(+), 1038 deletions(-) delete mode 100644 geos-posp/tests/mainTests.py delete mode 100644 geos-posp/tests/testsFunctionsGeosLogReader.py delete mode 100644 geos-posp/tests/testsGeosLogReaderConvergence.py delete mode 100644 
geos-posp/tests/testsGeosLogReaderWells.py delete mode 100644 geos-posp/tests/testsInvalidLogs.py delete mode 100644 geos-pv/tests/testsFunctionsFigure2DGenerator.py diff --git a/docs/geos_posp_docs/processing.rst b/docs/geos_posp_docs/processing.rst index 9da336b9..d82e7361 100644 --- a/docs/geos_posp_docs/processing.rst +++ b/docs/geos_posp_docs/processing.rst @@ -3,39 +3,6 @@ Processing functions This package define functions to process data. - -geos_posp.processing.geomechanicsCalculatorFunctions module ---------------------------------------------------------------- - -.. automodule:: geos_posp.processing.geomechanicsCalculatorFunctions - :members: - :undoc-members: - :show-inheritance: - -geos_posp.processing.geosLogReaderFunctions module ------------------------------------------------------- - -.. automodule:: geos_posp.processing.geosLogReaderFunctions - :members: - :undoc-members: - :show-inheritance: - -geos_posp.processing.MohrCircle module ------------------------------------------- - -.. automodule:: geos_posp.processing.MohrCircle - :members: - :undoc-members: - :show-inheritance: - -geos_posp.processing.MohrCoulomb module -------------------------------------------- - -.. 
automodule:: geos_posp.processing.MohrCoulomb - :members: - :undoc-members: - :show-inheritance: - geos_posp.processing.multiblockInpectorTreeFunctions module --------------------------------------------------------------- diff --git a/geos-geomechanics/src/geos/geomechanics/model/MohrCircle.py b/geos-geomechanics/src/geos/geomechanics/model/MohrCircle.py index 2e723767..a917c3de 100644 --- a/geos-geomechanics/src/geos/geomechanics/model/MohrCircle.py +++ b/geos-geomechanics/src/geos/geomechanics/model/MohrCircle.py @@ -5,7 +5,7 @@ import numpy.typing as npt from typing_extensions import Self -from geos_posp.processing.geomechanicsCalculatorFunctions import ( +from geos.geomechanics.processing.geomechanicsCalculatorFunctions import ( computeStressPrincipalComponentsFromStressVector, ) __doc__ = """ diff --git a/geos-geomechanics/src/geos/geomechanics/processing/geomechanicsCalculatorFunctions.py b/geos-geomechanics/src/geos/geomechanics/processing/geomechanicsCalculatorFunctions.py index 174c1726..ab01621b 100644 --- a/geos-geomechanics/src/geos/geomechanics/processing/geomechanicsCalculatorFunctions.py +++ b/geos-geomechanics/src/geos/geomechanics/processing/geomechanicsCalculatorFunctions.py @@ -5,7 +5,7 @@ import numpy as np import numpy.typing as npt -from geos_posp.processing.MohrCoulomb import MohrCoulomb +from geos.geomechanics.model.MohrCoulomb import MohrCoulomb from geos.utils.algebraFunctions import getAttributeMatrixFromVector from geos.utils.PhysicalConstants import ( EPSILON, ) diff --git a/geos-posp/src/PVplugins/PVAttributeMapping.py b/geos-posp/src/PVplugins/PVAttributeMapping.py index 8a304708..edb715ed 100644 --- a/geos-posp/src/PVplugins/PVAttributeMapping.py +++ b/geos-posp/src/PVplugins/PVAttributeMapping.py @@ -13,6 +13,8 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + from geos.utils.Logger import Logger, getLogger from geos_posp.filters.AttributeMappingFromCellCoords import ( 
AttributeMappingFromCellCoords, ) diff --git a/geos-posp/src/PVplugins/PVCreateConstantAttributePerRegion.py b/geos-posp/src/PVplugins/PVCreateConstantAttributePerRegion.py index e49c61e9..23007575 100644 --- a/geos-posp/src/PVplugins/PVCreateConstantAttributePerRegion.py +++ b/geos-posp/src/PVplugins/PVCreateConstantAttributePerRegion.py @@ -15,6 +15,8 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + import vtkmodules.util.numpy_support as vnp from geos.utils.Logger import Logger, getLogger from geos_posp.processing.multiblockInpectorTreeFunctions import ( diff --git a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolume.py b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolume.py index 50d1d4a8..3295d4c5 100644 --- a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolume.py +++ b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolume.py @@ -15,6 +15,8 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + from geos.utils.GeosOutputsConstants import ( GeosMeshOutputsEnum, getAttributeToTransferFromInitialTime, diff --git a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurface.py b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurface.py index 22477abf..e9927b52 100644 --- a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurface.py +++ b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurface.py @@ -16,6 +16,8 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + from geos.utils.GeosOutputsConstants import ( GeosMeshOutputsEnum, getAttributeToTransferFromInitialTime, diff --git a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurfaceWell.py b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurfaceWell.py index 7aaabc5a..2dd49639 100644 --- a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurfaceWell.py +++ b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurfaceWell.py @@ -16,6 +16,8 @@ if 
parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + from geos.utils.GeosOutputsConstants import ( GeosMeshOutputsEnum, getAttributeToTransferFromInitialTime, diff --git a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeWell.py b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeWell.py index 43882eac..4210128e 100644 --- a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeWell.py +++ b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeWell.py @@ -19,6 +19,8 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + from geos.utils.GeosOutputsConstants import ( GeosMeshOutputsEnum, getAttributeToTransferFromInitialTime, diff --git a/geos-posp/src/PVplugins/PVGeomechanicsAnalysis.py b/geos-posp/src/PVplugins/PVGeomechanicsAnalysis.py index ad78eb0e..fceb51f0 100644 --- a/geos-posp/src/PVplugins/PVGeomechanicsAnalysis.py +++ b/geos-posp/src/PVplugins/PVGeomechanicsAnalysis.py @@ -19,6 +19,8 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + from geos.utils.Logger import Logger, getLogger from geos.utils.PhysicalConstants import ( DEFAULT_FRICTION_ANGLE_DEG, diff --git a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolume.py b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolume.py index 2807152c..f5d44fef 100644 --- a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolume.py +++ b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolume.py @@ -16,6 +16,8 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + from geos.utils.Logger import Logger, getLogger from geos.utils.PhysicalConstants import ( DEFAULT_FRICTION_ANGLE_DEG, diff --git a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurface.py b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurface.py index b92a136e..14b7fb24 100644 --- a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurface.py +++ 
b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurface.py @@ -16,6 +16,8 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + from paraview.util.vtkAlgorithm import ( # type: ignore[import-not-found] VTKPythonAlgorithmBase, smdomain, smhint, smproperty, smproxy, ) diff --git a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurfaceWell.py b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurfaceWell.py index 03cdc1d7..10c7bc73 100644 --- a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurfaceWell.py +++ b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurfaceWell.py @@ -16,6 +16,8 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + from paraview.util.vtkAlgorithm import ( # type: ignore[import-not-found] VTKPythonAlgorithmBase, smdomain, smhint, smproperty, smproxy, ) diff --git a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeWell.py b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeWell.py index 93c0c416..9733766c 100644 --- a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeWell.py +++ b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeWell.py @@ -16,6 +16,8 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + from paraview.util.vtkAlgorithm import ( # type: ignore[import-not-found] VTKPythonAlgorithmBase, smdomain, smhint, smproperty, smproxy, ) diff --git a/geos-posp/src/PVplugins/PVMergeBlocksEnhanced.py b/geos-posp/src/PVplugins/PVMergeBlocksEnhanced.py index bbbd9696..b406a092 100644 --- a/geos-posp/src/PVplugins/PVMergeBlocksEnhanced.py +++ b/geos-posp/src/PVplugins/PVMergeBlocksEnhanced.py @@ -13,6 +13,8 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + from geos.utils.Logger import Logger, getLogger from geos_posp.processing.vtkUtils import mergeBlocks from paraview.util.vtkAlgorithm import ( # 
type: ignore[import-not-found] diff --git a/geos-posp/src/PVplugins/PVMohrCirclePlot.py b/geos-posp/src/PVplugins/PVMohrCirclePlot.py index 90693c3d..8ddc690c 100644 --- a/geos-posp/src/PVplugins/PVMohrCirclePlot.py +++ b/geos-posp/src/PVplugins/PVMohrCirclePlot.py @@ -27,6 +27,8 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + import geos_posp.visu.mohrCircles.functionsMohrCircle as mcf import geos_posp.visu.PVUtils.paraviewTreatments as pvt from geos.geomechanics.model.MohrCircle import MohrCircle diff --git a/geos-posp/src/PVplugins/PVSurfaceGeomechanics.py b/geos-posp/src/PVplugins/PVSurfaceGeomechanics.py index 85aa088f..f41d0034 100644 --- a/geos-posp/src/PVplugins/PVSurfaceGeomechanics.py +++ b/geos-posp/src/PVplugins/PVSurfaceGeomechanics.py @@ -13,6 +13,8 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + from geos.utils.Logger import Logger, getLogger from geos.utils.PhysicalConstants import ( DEFAULT_FRICTION_ANGLE_DEG, diff --git a/geos-posp/src/PVplugins/PVTransferAttributesVolumeSurface.py b/geos-posp/src/PVplugins/PVTransferAttributesVolumeSurface.py index 1783ddbd..2b38d7be 100644 --- a/geos-posp/src/PVplugins/PVTransferAttributesVolumeSurface.py +++ b/geos-posp/src/PVplugins/PVTransferAttributesVolumeSurface.py @@ -12,6 +12,8 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + from geos.utils.Logger import Logger, getLogger from geos_posp.filters.TransferAttributesVolumeSurface import ( TransferAttributesVolumeSurface, ) diff --git a/geos-posp/src/geos_posp/visu/mohrCircles/__init__.py b/geos-posp/src/geos_posp/visu/mohrCircles/__init__.py index 4e4c7b57..2aa543df 100644 --- a/geos-posp/src/geos_posp/visu/mohrCircles/__init__.py +++ b/geos-posp/src/geos_posp/visu/mohrCircles/__init__.py @@ -1,3 +1,2 @@ -MOHR_CIRCLE_PATH: str = "visu/mohrCircles/" +MOHR_CIRCLE_PATH: str = 
"geos_posp/visu/mohrCircles/" MOHR_CIRCLE_ANALYSIS_MAIN = "mainMohrCircles.py" -MOHR_CIRCLE_EVOLUTION_MAIN = "mainMohrCircleEvolution.py" diff --git a/geos-posp/tests/mainTests.py b/geos-posp/tests/mainTests.py deleted file mode 100644 index 46ec2bd9..00000000 --- a/geos-posp/tests/mainTests.py +++ /dev/null @@ -1,26 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. -# SPDX-FileContributor: Alexandre Benedicto -import os -import sys -import unittest - -dir_path = os.path.dirname( os.path.realpath( __file__ ) ) -parent_dir_path = os.path.join( os.path.dirname( dir_path ), "src" ) -if parent_dir_path not in sys.path: - sys.path.append( parent_dir_path ) - - -def main() -> None: - """Run all tests.""" - # Load all test cases in the current folder - test_loader = unittest.TestLoader() - test_suite = test_loader.discover( ".", pattern="tests*.py" ) - - # Run the test suite - runner = unittest.TextTestRunner() - runner.run( test_suite ) - - -if __name__ == "__main__": - main() diff --git a/geos-posp/tests/testsFunctionsGeosLogReader.py b/geos-posp/tests/testsFunctionsGeosLogReader.py deleted file mode 100644 index 3ca47eaf..00000000 --- a/geos-posp/tests/testsFunctionsGeosLogReader.py +++ /dev/null @@ -1,432 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. 
-# SPDX-FileContributor: Alexandre Benedicto -# ruff: noqa: E402 # disable Module level import not at top of file -import contextlib -import io -import os -import sys -import unittest - -from typing_extensions import Self - -dir_path = os.path.dirname( os.path.realpath( __file__ ) ) -parent_dir_path = os.path.join( os.path.dirname( dir_path ), "src" ) -if parent_dir_path not in sys.path: - sys.path.append( parent_dir_path ) - -from geos.utils.enumUnits import Unit, getSIUnits -from geos.pv.geosLogReaderUtils import geosLogReaderFunctions as utils - - -class TestsFunctionsGeosLogReader( unittest.TestCase ): - - def test_replaceSpecialCharactersWithWhitespace( self: Self ) -> None: - """Test replaceSpecialCharactersWithWhitespace function.""" - example: str = "hi '(_there(''&*$^,:;'" - expected: str = "hi there " - obtained: str = utils.replaceSpecialCharactersWithWhitespace( example ) - self.assertEqual( expected, obtained ) - - def test_formatPropertyName( self: Self ) -> None: - """Test formatPropertyName function.""" - example: str = " Delta pressure min" - expected: str = "DeltaPressureMin" - obtained: str = utils.formatPropertyName( example ) - self.assertEqual( expected, obtained ) - - def test_extractRegion( self: Self ) -> None: - """Test extractRegion function.""" - example: str = ( "Adding Object CellElementRegion named Reservoir from" - " ObjectManager::Catalog." 
) - expected: str = "Reservoir" - obtained: str = utils.extractRegion( example ) - self.assertEqual( expected, obtained ) - - def test_extractStatsName( self: Self ) -> None: - """Test extractStatsName function.""" - example: str = ( "compflowStatistics, Reservoir: Pressure (min, average, max): " - "2.86419e+07, 2.93341e+07, 3.006e+07 Pa" ) - expected: str = "compflowStatistics" - obtained: str = utils.extractStatsName( example ) - self.assertEqual( expected, obtained ) - - def test_extractPhaseModel( self: Self ) -> None: - """Test extractPhaseModel function.""" - example: str = ( " TableFunction: " - "fluid_phaseModel1_PhillipsBrineDensity_table" ) - expected: str = "PhillipsBrineDensity" - obtained: str = utils.extractPhaseModel( example ) - self.assertEqual( expected, obtained ) - - def test_buildPropertiesNameForPhases( self: Self ) -> None: - """Test buildPropertiesNameForPhases function.""" - example_block: str = " Mobile phase mass" - example_phases: list[ str ] = [ "CO2", "Water" ] - expected: list[ str ] = [ " Mobile CO2 mass", " Mobile Water mass" ] - obtained: list[ str ] = utils.buildPropertiesNameForPhases( example_block, example_phases ) - self.assertEqual( expected, obtained ) - - def test_buildPropertiesNameForComponents( self: Self ) -> None: - """Test buildPropertiesNameForComponents function.""" - example: list[ str ] = [ "CO2", "Water" ] - expected: list[ str ] = [ - "Dissolved mass CO2 in CO2", - "Dissolved mass Water in CO2", - "Dissolved mass CO2 in Water", - "Dissolved mass Water in Water", - ] - obtained: list[ str ] = utils.buildPropertiesNameForComponents( example ) - self.assertEqual( expected, obtained ) - - def test_buildPropertiesNameNoPhases( self: Self ) -> None: - """Test buildPropertiesNameNoPhases function.""" - example_name_block: str = " Delta pressure " - example_extensions: str = "min, max)" - expected: list[ str ] = [ " Delta pressure min", " Delta pressure max" ] - obtained: list[ str ] = utils.buildPropertiesNameNoPhases( 
example_name_block, example_extensions ) - self.assertEqual( expected, obtained ) - - def test_buildPropertiesNameNoPhases2( self: Self ) -> None: - """Test buildPropertiesNameNoPhases function.""" - example: str = " Delta pressure " - expected: list[ str ] = [ " Delta pressure " ] - obtained: list[ str ] = utils.buildPropertiesNameNoPhases( example ) - self.assertEqual( expected, obtained ) - - def test_buildPropertiesNameFromGeosProperties( self: Self ) -> None: - """Test buildPropertiesNameFromGeosProperties function.""" - examples_phases: list[ str ] = [ "CO2", "Water" ] - example: str = " Pressure (min, average, max)" - expected: list[ str ] = [ " Pressure min", " Pressure average", " Pressure max" ] - obtained: list[ str ] = utils.buildPropertiesNameFromGeosProperties( example, examples_phases ) - self.assertEqual( expected, obtained ) - - example = " Total dynamic pore volume" - expected = [ " Total dynamic pore volume" ] - obtained = utils.buildPropertiesNameFromGeosProperties( example, examples_phases ) - self.assertEqual( expected, obtained ) - - example = " Non-trapped phase mass (metric 1)" - expected = [ " Non-trapped CO2 mass ", " Non-trapped Water mass " ] - obtained = utils.buildPropertiesNameFromGeosProperties( example, examples_phases ) - self.assertEqual( expected, obtained ) - - example = " Dissolved component mass" - expected = [ - "Dissolved mass CO2 in CO2", - "Dissolved mass Water in CO2", - "Dissolved mass CO2 in Water", - "Dissolved mass Water in Water", - ] - obtained = utils.buildPropertiesNameFromGeosProperties( example, examples_phases ) - self.assertEqual( expected, obtained ) - - example = " Component mass" - expected = [ - "Dissolved mass CO2 in CO2", - "Dissolved mass Water in CO2", - "Dissolved mass CO2 in Water", - "Dissolved mass Water in Water", - ] - obtained = utils.buildPropertiesNameFromGeosProperties( example, examples_phases ) - self.assertEqual( expected, obtained ) - - def test_extractPropertiesFlow( self: Self ) -> 
None: - """Test extractPropertiesFlow function.""" - example_block: str = ( "compflowStatistics, Reservoir: Trapped phase mass (metric 1):" - " { 0, 1.9147e+10 } kg" ) - examples_phases: list[ str ] = [ "CO2", "Water" ] - expected: list[ str ] = [ - "Reservoir__TrappedCO2Mass", - "Reservoir__TrappedWaterMass", - ] - obtained: list[ str ] = utils.extractPropertiesFlow( example_block, examples_phases ) - self.assertEqual( expected, obtained ) - - example_block = ( "compflowStatistics, Reservoir: Phase mass:" - " { 0, 1.01274e+14 } kg" ) - expected = [ "Reservoir__CO2Mass", "Reservoir__WaterMass" ] - obtained = utils.extractPropertiesFlow( example_block, examples_phases ) - self.assertEqual( expected, obtained ) - - example_block = ( "compflowStatistics, Region1 (time 4320000 s): Pressure" - " (min, average, max): 10984659.811871096, 11257138.433702637," - " 11535137.236653088 Pa" ) - expected = [ - "Region1__PressureMin", - "Region1__PressureAverage", - "Region1__PressureMax", - ] - obtained = utils.extractPropertiesFlow( example_block, examples_phases ) - self.assertEqual( expected, obtained ) - - def test_countNumberLines( self: Self ) -> None: - """Test countNumberLines function.""" - log1: str = os.path.join( dir_path, "Data/job_GEOS_825200.out" ) - expected1: int = 24307 - obtained1: int = utils.countNumberLines( log1 ) - self.assertEqual( expected1, obtained1 ) - - def test_extractValuesFlow( self: Self ) -> None: - """Test extractValuesFlow function.""" - example: str = ( "compflowStatistics, Reservoir: Pressure (min, average, max):" - " 1.25e+07, 1.25e+07, 1.25e+07 Pa" ) - expected: list[ float ] = [ 1.25e07, 1.25e07, 1.25e07 ] - obtained: list[ float ] = utils.extractValuesFlow( example ) - self.assertEqual( expected, obtained ) - - example = ( "compflowStatistics, Reservoir: Phase dynamic pore volumes:" - " { 0, 6.61331e+07 } rm^3" ) - expected = [ 0.0, 6.61331e07 ] - obtained = utils.extractValuesFlow( example ) - self.assertEqual( expected, obtained ) - - 
example = ( "compflowStatistics, Reservoir: Dissolved component mass:" - " { { 0, 0 }, { 0, 6.38235e+10 } } kg" ) - expected = [ 0.0, 0.0, 0.0, 6.38235e10 ] - obtained = utils.extractValuesFlow( example ) - self.assertEqual( expected, obtained ) - - example = ( "compflowStatistics, Reservoir: Cell fluid mass" - " (min, max): 10765.1, 2.2694e+10 kg" ) - expected = [ 10765.1, 2.2694e10 ] - obtained = utils.extractValuesFlow( example ) - self.assertEqual( expected, obtained ) - - example = ( "compflowStatistics, Region1 (time 256800000 s): Pressure" - " (min, average, max): 10023287.92961521, 10271543.591259222," - " 10525096.98374942 Pa" ) - expected = [ 10023287.92961521, 10271543.591259222, 10525096.98374942 ] - obtained = utils.extractValuesFlow( example ) - self.assertEqual( expected, obtained ) - - example = ( "compflowStatistics, Region1 (time 4320000 s): Phase dynamic" - " pore volume: [0, 799999924.1499865] rm^3" ) - expected = [ 0, 799999924.1499865 ] - obtained = utils.extractValuesFlow( example ) - self.assertEqual( expected, obtained ) - - def test_convertValues( self: Self ) -> None: - """Test convertValues function.""" - propertyNames: list[ str ] = [ " Delta pressure min ", " CO2 volume " ] - propertyValues: list[ float ] = [ 1e6, 2e8 ] - propertyUnits: dict[ str, Unit ] = getSIUnits() - expected: list[ float ] = [ 1e6, 2e8 ] - obtained: list[ float ] = utils.convertValues( propertyNames, propertyValues, propertyUnits ) - self.assertEqual( expected, obtained ) - - propertyNames = [ "WellControls__TotalFluidDensity" ] - propertyValues = [ 1e4 ] - expected = [ 1e4 ] - obtained = utils.convertValues( propertyNames, propertyValues, propertyUnits ) - self.assertEqual( expected, obtained ) - - def test_extractWell( self: Self ) -> None: - """Test extractWell function.""" - line = " TableFunction: well.CO2001_ConstantBHP_table" - expected = "well.CO2001" - obtained = utils.extractWell( line ) - self.assertEqual( expected, obtained ) - - def 
test_identifyCurrentWell( self: Self ) -> None: - """Test identifyCurrentWell function.""" - lastWellName: str = "well1" - line: str = ( "The total rate is 0 kg/s, which corresponds to a" + "total surface volumetric rate of 0 sm3/s" ) - expected: str = "well1" - obtained: str = utils.identifyCurrentWell( line, lastWellName ) - self.assertEqual( expected, obtained ) - - line = ( "Rank 18: well.CO2001: BHP (at the specified reference" + " elevation): 19318538.400682557 Pa" ) - expected = "well.CO2001" - obtained = utils.identifyCurrentWell( line, lastWellName ) - self.assertEqual( expected, obtained ) - - line = ( "wellControls1: BHP (at the specified reference" + " elevation): 12337146.157562563 Pa" ) - expected = "wellControls1" - obtained = utils.identifyCurrentWell( line, lastWellName ) - self.assertEqual( expected, obtained ) - - def test_extractWellTags( self: Self ) -> None: - """Test extractWellTags function.""" - line: str = ( "Rank 18: well.CO2001: BHP " + "(at the specified reference elevation): 193000 Pa" ) - expected: list[ str ] = [ "BHP" ] - obtained: list[ str ] = utils.extractWellTags( line ) - self.assertEqual( expected, obtained ) - - line = ( "The total rate is 0 kg/s, which corresponds" + " to a total surface volumetric rate of 0 sm3/s" ) - expected = [ "total massRate", "total surface volumetricRate" ] - obtained = utils.extractWellTags( line ) - self.assertEqual( expected, obtained ) - - def test_extractValuesWell( self: Self ) -> None: - """Test extractValuesWell function.""" - line: str = ( "Rank 18: well.CO2001: BHP " + "(at the specified reference elevation): 193000 Pa" ) - expected: list[ float ] = [ 193000.0 ] - obtained: list[ float ] = utils.extractValuesWell( line, 1 ) - self.assertEqual( expected, obtained ) - line = ( "The total rate is 0 kg/s, which corresponds" + " to a total surface volumetric rate of 0 sm3/s" ) - expected = [ 0.0, 0.0 ] - obtained = utils.extractValuesWell( line, 2 ) - self.assertEqual( expected, obtained ) - - 
line = "The phase surface volumetric rate is" + " 1.9466968733035026e-12 sm3/s" - expected = [ 1.9466968733035026e-12 ] - obtained = utils.extractValuesWell( line, 1 ) - self.assertEqual( expected, obtained ) - - def test_extractAquifer( self: Self ) -> None: - """Test extractAquifer function.""" - line: str = " TableFunction:aquifer1_pressureInfluence_table" - expected: str = "aquifer1" - obtained: str = utils.extractAquifer( line ) - self.assertEqual( expected, obtained ) - - def test_extractValueAndNameAquifer( self: Self ) -> None: - """Test extractValueAndNameAquifer function.""" - line: str = ( "FlowSolverBase compositionalMultiphaseFlow" + - " (SimuDeck_aquifer_pression_meme.xml, l.28): at time 100s, the" + - " boundary condition 'aquifer1' produces a flux of" + - " -0.6181975187076816 kg (or moles if useMass=0)." ) - expected: tuple[ str, float ] = ( "aquifer1", -0.6181975187076816 ) - obtained: tuple[ str, float ] = utils.extractValueAndNameAquifer( line ) - self.assertEqual( expected, obtained ) - - line = ( "FlowSolverBase compositionalMultiphaseFVMSolver" + - " (nl_multiphase_with_well_reservoir_homo_for_Pierre_versionPaul" + - "_hysteresisIX.xml, l.31): at time 25636.406820617773s, the" + - " boundary condition 'Aquifer3' produces a flux of" + - " -0.8441759009606705 kg (or moles if useMass=0). 
" ) - expected = ( "Aquifer3", -0.8441759009606705 ) - obtained = utils.extractValueAndNameAquifer( line ) - self.assertEqual( expected, obtained ) - - def test_extractNewtonIter( self: Self ) -> None: - """Test extractNewtonIter function.""" - line: str = " Attempt: 2, ConfigurationIter: 1, NewtonIter: 8" - expected: int = 8 - obtained: int = utils.extractNewtonIter( line ) - self.assertEqual( expected, obtained ) - - def test_extractLinearIter( self: Self ) -> None: - """Test extractLinearIter function.""" - line: str = ( " Linear Solver | Success | Iterations: 23 | Final Rel Res:" + - " 5.96636e-05 | Make Restrictor Time: 0 | Compute Auu Time: 0 |" + - " SC Filter Time: 0 | Setup Time: 1.5156 s | Solve Time:" + " 0.041093 s" ) - expected: int = 23 - obtained: int = utils.extractLinearIter( line ) - self.assertEqual( expected, obtained ) - - def test_timeInSecond( self: Self ) -> None: - """Test timeInSecond function.""" - timeCounter: dict[ str, float ] = { - "years": 0, - "days": 0, - "hrs": 0, - "min": 0, - "s": 0, - } - expected: float = 0.0 - obtained: float = utils.timeInSecond( timeCounter ) - self.assertEqual( expected, obtained ) - - timeCounter = { "years": 1, "days": 1, "hrs": 1, "min": 1, "s": 1 } - expected = 31647661.0 - obtained = utils.timeInSecond( timeCounter ) - self.assertEqual( expected, obtained ) - - def test_extractTimeAndDt( self: Self ) -> None: - """Test extractTimeAndDt function.""" - line: str = "Time: 1 s, dt: 1 s, Cycle: 0" - expected: tuple[ float, float ] = ( 1.0, 1.0 ) - obtained: tuple[ float, float ] = utils.extractTimeAndDt( line ) - self.assertEqual( expected, obtained ) - - line = "Time: 1s, dt: 1s, Cycle: 0" - expected = ( 1.0, 1.0 ) - obtained = utils.extractTimeAndDt( line ) - self.assertEqual( expected, obtained ) - - line = "Time: 1e5s, dt: 1e6s, Cycle: 0" - expected = ( 1.0e5, 1.0e6 ) - obtained = utils.extractTimeAndDt( line ) - self.assertEqual( expected, obtained ) - - line = "Time: 1 min, dt: 1 s, Cycle: 0" - 
expected = ( 60.0, 1.0 ) - obtained = utils.extractTimeAndDt( line ) - self.assertEqual( expected, obtained ) - - line = "Time: 1 hrs, dt: 1 s, Cycle: 0" - expected = ( 3600.0, 1.0 ) - obtained = utils.extractTimeAndDt( line ) - self.assertEqual( expected, obtained ) - - line = "Time: 1 days, dt: 1 s, Cycle: 0" - expected = ( 86400.0, 1.0 ) - obtained = utils.extractTimeAndDt( line ) - self.assertEqual( expected, obtained ) - - line = "Time: 1 years, 1 days, 1 hrs, 1 min, 1 s, dt: 1 s, Cycle: 1" - expected = ( 31647661.0, 1.0 ) - obtained = utils.extractTimeAndDt( line ) - self.assertEqual( expected, obtained ) - - def test_identifyProperties( self: Self ) -> None: - """Test identifyProperties function.""" - properties: list[ str ] = [ "WellControls_TotalFluidDensity" ] - expected: list[ str ] = [ "35:WellControls_TotalFluidDensity" ] - obtained: list[ str ] = utils.identifyProperties( properties ) - self.assertEqual( expected, obtained ) - - def test_findNumberPhasesSimulation( self: Self ) -> None: - """Test findNumberPhasesSimulation function.""" - filename: str = "job_GEOS_825200.out" - pathToFile: str = os.path.join( dir_path, "Data/" ) - filepath: str = os.path.join( pathToFile, filename ) - expected: int = 2 - obtained: int = utils.findNumberPhasesSimulation( filepath ) - self.assertEqual( expected, obtained ) - - def test_transformUserChoiceToListPhases( self: Self ) -> None: - """Test phaseNameBuilder function with 3 phases.""" - userChoice: str = "phase0 phase1 phase2" - expected: list[ str ] = [ "phase0", "phase1", "phase2" ] - obtained: list[ str ] = utils.transformUserChoiceToListPhases( userChoice ) - self.assertEqual( expected, obtained ) - - userChoice = "phase0, phase1, phase2" - expected = [ "phase0", "phase1", "phase2" ] - obtained = utils.transformUserChoiceToListPhases( userChoice ) - self.assertEqual( expected, obtained ) - - userChoice = "phase0; phase1; phase2" - expected = [] - capturedOutput = io.StringIO() - with 
contextlib.redirect_stdout( capturedOutput ): - obtained = utils.transformUserChoiceToListPhases( userChoice ) - self.assertEqual( expected, obtained ) - self.assertGreater( len( capturedOutput.getvalue() ), 0 ) - - def test_phaseNamesBuilder( self: Self ) -> None: - """Test phaseNameBuilder function with 4 phases.""" - phasesFromUser: list[ str ] = [] - expected: list[ str ] = [ "phase0", "phase1", "phase2", "phase3" ] - obtained: list[ str ] = utils.phaseNamesBuilder( 4, phasesFromUser ) - self.assertEqual( expected, obtained ) - - phasesFromUser = [ "water", "gas" ] - expected = [ "water", "gas", "phase2", "phase3" ] - obtained = utils.phaseNamesBuilder( 4, phasesFromUser ) - self.assertEqual( expected, obtained ) - - phasesFromUser = [ "water", "CO2", "N2", "H2", "CH4" ] - expected = [ "water", "CO2", "N2", "H2" ] - obtained = utils.phaseNamesBuilder( 4, phasesFromUser ) - self.assertEqual( expected, obtained ) - - # TODO def test_extractValuesFromBlockWhenMultipleComponents(self :Self) - - -if __name__ == "__main__": - unittest.main() diff --git a/geos-posp/tests/testsGeosLogReaderConvergence.py b/geos-posp/tests/testsGeosLogReaderConvergence.py deleted file mode 100644 index 75981264..00000000 --- a/geos-posp/tests/testsGeosLogReaderConvergence.py +++ /dev/null @@ -1,70 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. 
-# SPDX-FileContributor: Alexandre Benedicto -# ruff: noqa: E402 # disable Module level import not at top of file -import os -import sys -import unittest - -import pandas as pd # type: ignore[import-untyped] -from typing_extensions import Self - -dir_path = os.path.dirname( os.path.realpath( __file__ ) ) -parent_dir_path = os.path.join( os.path.dirname( dir_path ), "src" ) -if parent_dir_path not in sys.path: - sys.path.append( parent_dir_path ) - -from geos.utils.UnitRepository import Unit, UnitRepository -from geos_posp.readers.GeosLogReaderConvergence import GeosLogReaderConvergence - -unitsObjSI: UnitRepository = UnitRepository() -conversionFactors: dict[ str, Unit ] = unitsObjSI.getPropertiesUnit() -pathFlowSim: str = os.path.join( dir_path, "Data/small_job_GEOS_246861.out" ) - - -class TestsFunctionsGeosLogReaderConvergence( unittest.TestCase ): - - def test1_readAllSimulation( self: Self ) -> None: - """Test convergence reader.""" - obj: GeosLogReaderConvergence = GeosLogReaderConvergence( pathFlowSim, conversionFactors ) - expectedPropertiesValues = { - "23:NewtonIter": [ 1, 1, 1 ], - "24:LinearIter": [ 1, 2, 2 ], - "23:CumulatedNewtonIter": [ 1, 2, 3 ], - "24:CumulatedLinearIter": [ 1, 3, 5 ], - } - self.assertEqual( - list( obj.m_solversIterationsValues.keys() ), - list( expectedPropertiesValues.keys() ), - ) - self.assertEqual( obj.m_solversIterationsValues, expectedPropertiesValues ) - expectedTimesteps: list[ float ] = [ 0.0, 8600.0, 25724.3 ] - self.assertEqual( obj.m_timesteps, expectedTimesteps ) - expectedDts: list[ float ] = [ 8600.0, 17124.3, 34165.3 ] - self.assertEqual( obj.m_dts, expectedDts ) - expectedDF: pd.DataFrame = pd.DataFrame() - columns_name = [ - "23:NewtonIter", - "24:LinearIter", - "23:CumulatedNewtonIter", - "24:CumulatedLinearIter", - "Time__s", - "dt__s", - ] - values: list[ list[ float ] ] = [ - [ 1.0, 1.0, 1.0 ], - [ 1.0, 2.0, 2.0 ], - [ 1.0, 2.0, 3.0 ], - [ 1.0, 3.0, 5.0 ], - [ 0.0, 8600.0, 25724.3 ], - [ 8600.0, 17124.3, 
34165.3 ], - ] - for column_name, value in zip( columns_name, values, strict=False ): - expectedDF[ column_name ] = value - obtainedDF: pd.DataFrame = obj.createDataframe() - self.assertEqual( list( obtainedDF.columns ), columns_name ) - self.assertTrue( expectedDF.equals( obtainedDF ) ) - - -if __name__ == "__main__": - unittest.main() diff --git a/geos-posp/tests/testsGeosLogReaderWells.py b/geos-posp/tests/testsGeosLogReaderWells.py deleted file mode 100644 index cc1920de..00000000 --- a/geos-posp/tests/testsGeosLogReaderWells.py +++ /dev/null @@ -1,228 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. -# SPDX-FileContributor: Alexandre Benedicto -# ruff: noqa: E402 # disable Module level import not at top of file -import contextlib -import io -import os -import sys -import unittest - -from typing_extensions import Self - -dir_path = os.path.dirname( os.path.realpath( __file__ ) ) -parent_dir_path = os.path.join( os.path.dirname( dir_path ), "src" ) -if parent_dir_path not in sys.path: - sys.path.append( parent_dir_path ) - -import pandas as pd # type: ignore[import-untyped] -from geos.utils.UnitRepository import Unit, UnitRepository -from geos_posp.readers.GeosLogReaderWells import GeosLogReaderWells - -unitsObjSI = UnitRepository() -conversionFactors: dict[ str, Unit ] = unitsObjSI.getPropertiesUnit() -pathFlowSim2: str = os.path.join( dir_path, "Data/small_job_GEOS_825200_wells.out" ) -pathFlowSim3: str = os.path.join( dir_path, "Data/runsinglephase.txt" ) -pathFlowSim4: str = os.path.join( dir_path, "Data/small_job_GEOS_891567.out" ) -pathFlowSim5: str = os.path.join( dir_path, "Data/job_GEOS_935933.out" ) - - -class TestsGeosLogReaderWells( unittest.TestCase ): - - def test1_readAllSimulation2( self: Self ) -> None: - """Test reading wells with 3 wells and 2 phases.""" - obj: GeosLogReaderWells = GeosLogReaderWells( pathFlowSim2, conversionFactors, [ "CO2", "water" ], 3 ) - self.assertEqual( 
obj.m_phaseNames, [ "CO2", "water" ] ) - self.assertEqual( obj.m_wellNames, [ "wellControls1", "wellControls2", "wellControls3" ] ) - expectedPropertiesValues = { - "11:WellControls1__BHP": [ 12337146.157562563, 27252686.916117527 ], - "12:WellControls1__TotalMassRate": [ 56.37348443784919, 56.32837354192296 ], - "13:WellControls1__TotalSurfaceVolumetricRate": [ 30.024025669350802, 30 ], - "14:WellControls1__SurfaceVolumetricRateCO2": [ - 30.023748128796043, - 29.9997226815373, - ], - "14:WellControls1__SurfaceVolumetricRateWater": [ - 0.00027754055475897704, - 0.00027731846270264625, - ], - "11:WellControls2__BHP": [ 13268440.020500632, 27450836.07294756 ], - "12:WellControls2__TotalMassRate": [ 56.38455514210006, 56.32837354192296 ], - "13:WellControls2__TotalSurfaceVolumetricRate": [ 30.029921829787227, 30 ], - "14:WellControls2__SurfaceVolumetricRateCO2": [ - 30.029644234728664, - 29.9997226815373, - ], - "14:WellControls2__SurfaceVolumetricRateWater": [ - 0.0002775950585639181, - 0.00027731846270264126, - ], - "11:WellControls3__BHP": [ 12318458.650753867, 26935804.208137162 ], - "12:WellControls3__TotalMassRate": [ 56.379144772078966, 56.32837354192296 ], - "13:WellControls3__TotalSurfaceVolumetricRate": [ 30.027040313946728, 30 ], - "14:WellControls3__SurfaceVolumetricRateCO2": [ - 30.02676274552475, - 29.9997226815373, - ], - "14:WellControls3__SurfaceVolumetricRateWater": [ - 0.0002775684219790799, - 0.00027731846270264625, - ], - "15:MeanBHP": [ 12641348.276272355, 27213109.065734085 ], - "16:MeanTotalMassRate": [ 56.37906145067607, 56.32837354192296 ], - "17:MeanTotalVolumetricRate": [ 30.02699593769492, 30 ], - "18:MeanSurfaceVolumetricRateCO2": [ 30.026718369683152, 29.999722681537303 ], - "18:MeanSurfaceVolumetricRateWater": [ - 0.00027756801176732497, - 0.00027731846270264457, - ], - } - self.assertEqual( - list( obj.m_wellsPropertiesValues.keys() ), - list( expectedPropertiesValues.keys() ), - ) - self.assertEqual( obj.m_wellsPropertiesValues, 
expectedPropertiesValues ) - expectedTimesteps: list[ float ] = [ 0.0, 3.1536e07 ] - self.assertEqual( obj.m_timesteps, expectedTimesteps ) - expectedDF: pd.DataFrame = pd.DataFrame() - columns_name = [ - "11:WellControls1__BHP__Pa", - "12:WellControls1__TotalMassRate__kg/s", - "13:WellControls1__TotalSurfaceVolumetricRate__m3/s", - "14:WellControls1__SurfaceVolumetricRateCO2__m3/s", - "14:WellControls1__SurfaceVolumetricRateWater__m3/s", - "11:WellControls2__BHP__Pa", - "12:WellControls2__TotalMassRate__kg/s", - "13:WellControls2__TotalSurfaceVolumetricRate__m3/s", - "14:WellControls2__SurfaceVolumetricRateCO2__m3/s", - "14:WellControls2__SurfaceVolumetricRateWater__m3/s", - "11:WellControls3__BHP__Pa", - "12:WellControls3__TotalMassRate__kg/s", - "13:WellControls3__TotalSurfaceVolumetricRate__m3/s", - "14:WellControls3__SurfaceVolumetricRateCO2__m3/s", - "14:WellControls3__SurfaceVolumetricRateWater__m3/s", - "15:MeanBHP__Pa", - "16:MeanTotalMassRate__kg/s", - "17:MeanTotalVolumetricRate__m3/s", - "18:MeanSurfaceVolumetricRateCO2__m3/s", - "18:MeanSurfaceVolumetricRateWater__m3/s", - "Time__s", - ] - values = [ - [ 12337146.157562563, 27252686.916117527 ], - [ 56.37348443784919, 56.32837354192296 ], - [ 30.024025669350802, 30 ], - [ 30.023748128796043, 29.9997226815373 ], - [ 0.00027754055475897704, 0.00027731846270264625 ], - [ 13268440.020500632, 27450836.07294756 ], - [ 56.38455514210006, 56.32837354192296 ], - [ 30.029921829787227, 30 ], - [ 30.029644234728664, 29.9997226815373 ], - [ 0.0002775950585639181, 0.00027731846270264126 ], - [ 12318458.650753867, 26935804.208137162 ], - [ 56.379144772078966, 56.32837354192296 ], - [ 30.027040313946728, 30 ], - [ 30.02676274552475, 29.9997226815373 ], - [ 0.0002775684219790799, 0.00027731846270264625 ], - [ 12641348.276272355, 27213109.065734085 ], - [ 56.37906145067607, 56.32837354192296 ], - [ 30.02699593769492, 30 ], - [ 30.026718369683152, 29.999722681537303 ], - [ 0.00027756801176732497, 0.00027731846270264457 
], - [ 0.0, 3.1536e07 ], - ] - for column_name, value in zip( columns_name, values, strict=False ): - expectedDF[ column_name ] = value - obtainedDF: pd.DataFrame = obj.createDataframe() - self.assertEqual( list( obtainedDF.columns ), columns_name ) - self.assertTrue( expectedDF.equals( obtainedDF ) ) - - def test3_readAllSimulation4( self: Self ) -> None: - """Test reading wells with 1 well and 2 phases.""" - obj: GeosLogReaderWells = GeosLogReaderWells( pathFlowSim4, conversionFactors, [ "CO2", "water" ], 1 ) - self.assertEqual( obj.m_phaseNames, [ "CO2", "water" ] ) - self.assertEqual( obj.m_wellNames, [ "well_1Control" ] ) - expectedPropertiesValues = { - "11:Well1Control__BHP": [ 23960094.51907003, 23318529.329811733 ], - "12:Well1Control__TotalMassRate": [ - 0.00029566997749602594, - 0.00029566997749602507, - ], - "13:Well1Control__TotalSurfaceVolumetricRate": [ - 2.0000000000000063e-05, - 2e-05, - ], - "14:Well1Control__SurfaceVolumetricRateCO2": [ - 1.9999999999987377e-05, - 1.999999999998732e-05, - ], - "14:Well1Control__SurfaceVolumetricRateWater": [ - 1.2681312543855673e-17, - 1.2681312543888312e-17, - ], - "15:MeanBHP": [ 23960094.51907003, 23318529.329811733 ], - "16:MeanTotalMassRate": [ 0.00029566997749602594, 0.00029566997749602507 ], - "17:MeanTotalVolumetricRate": [ 2.0000000000000063e-05, 2e-05 ], - "18:MeanSurfaceVolumetricRateCO2": [ - 1.9999999999987377e-05, - 1.999999999998732e-05, - ], - "18:MeanSurfaceVolumetricRateWater": [ - 1.2681312543855673e-17, - 1.2681312543888312e-17, - ], - } - self.assertEqual( - list( obj.m_wellsPropertiesValues.keys() ), - list( expectedPropertiesValues.keys() ), - ) - self.assertEqual( obj.m_wellsPropertiesValues, expectedPropertiesValues ) - expectedTimesteps: list[ float ] = [ 0.0, 100.0 ] - self.assertEqual( obj.m_timesteps, expectedTimesteps ) - expectedDF: pd.DataFrame = pd.DataFrame() - columns_name = [ - "11:Well1Control__BHP__Pa", - "12:Well1Control__TotalMassRate__kg/s", - 
"13:Well1Control__TotalSurfaceVolumetricRate__m3/s", - "14:Well1Control__SurfaceVolumetricRateCO2__m3/s", - "14:Well1Control__SurfaceVolumetricRateWater__m3/s", - "15:MeanBHP__Pa", - "16:MeanTotalMassRate__kg/s", - "17:MeanTotalVolumetricRate__m3/s", - "18:MeanSurfaceVolumetricRateCO2__m3/s", - "18:MeanSurfaceVolumetricRateWater__m3/s", - "Time__s", - ] - values = [ - [ 23960094.51907003, 23318529.329811733 ], - [ 0.00029566997749602594, 0.00029566997749602507 ], - [ 2.0000000000000063e-05, 2e-05 ], - [ 1.9999999999987377e-05, 1.999999999998732e-05 ], - [ 1.2681312543855673e-17, 1.2681312543888312e-17 ], - [ 23960094.51907003, 23318529.329811733 ], - [ 0.00029566997749602594, 0.00029566997749602507 ], - [ 2.0000000000000063e-05, 2e-05 ], - [ 1.9999999999987377e-05, 1.999999999998732e-05 ], - [ 1.2681312543855673e-17, 1.2681312543888312e-17 ], - [ 0.0, 100.0 ], - ] - for column_name, value in zip( columns_name, values, strict=False ): - expectedDF[ column_name ] = value - obtainedDF: pd.DataFrame = obj.createDataframe() - self.assertEqual( list( obtainedDF.columns ), columns_name ) - self.assertTrue( expectedDF.equals( obtainedDF ) ) - - def test_invalidWellName( self: Self ) -> None: - """Test output message in case of invalid well names.""" - # TODO - # Message being output is not tested in itself :Self, only its appearance - capturedOutput = io.StringIO() - with contextlib.redirect_stdout( capturedOutput ): - obj: GeosLogReaderWells = GeosLogReaderWells( pathFlowSim5, conversionFactors, [ "CO2", "water" ], 1 ) - self.assertGreater( len( capturedOutput.getvalue() ), 0 ) - self.assertEqual( obj.m_phaseNames, [ "CO2", "water" ] ) - self.assertEqual( obj.m_wellNames, [ "well_1Control" ] ) - - -if __name__ == "__main__": - unittest.main() diff --git a/geos-posp/tests/testsInvalidLogs.py b/geos-posp/tests/testsInvalidLogs.py deleted file mode 100644 index 9d707975..00000000 --- a/geos-posp/tests/testsInvalidLogs.py +++ /dev/null @@ -1,59 +0,0 @@ -# 
SPDX-License-Identifier: Apache-2.0 -# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. -# SPDX-FileContributor: Alexandre Benedicto -# ruff: noqa: E402 # disable Module level import not at top of file -import contextlib -import io -import os -import sys -import unittest - -from typing_extensions import Self - -dir_path = os.path.dirname( os.path.realpath( __file__ ) ) -parent_dir_path = os.path.join( os.path.dirname( dir_path ), "src" ) -if parent_dir_path not in sys.path: - sys.path.append( parent_dir_path ) - -from geos.utils.UnitRepository import Unit, UnitRepository -from geos_posp.readers.GeosLogReaderAquifers import GeosLogReaderAquifers -from geos_posp.readers.GeosLogReaderConvergence import GeosLogReaderConvergence -from geos_posp.readers.GeosLogReaderFlow import GeosLogReaderFlow -from geos_posp.readers.GeosLogReaderWells import GeosLogReaderWells - -unitsObjSI: UnitRepository = UnitRepository() -conversionFactors: dict[ str, Unit ] = unitsObjSI.getPropertiesUnit() -pathFlowSim: str = os.path.join( dir_path, "Data/empty.txt" ) - - -class TestsInvalidLogs( unittest.TestCase ): - - def test_emptyLog( self: Self ) -> None: - """Test empty log.""" - # capturedOutWells = io.StringIO() - with self.assertRaises( AssertionError ): - objWells = GeosLogReaderWells( # noqa: F841 - pathFlowSim, conversionFactors, [], 1 ) - - capturedOutAquif = io.StringIO() - with contextlib.redirect_stdout( capturedOutAquif ): - objAquif = GeosLogReaderAquifers( # noqa: F841 - pathFlowSim, conversionFactors ) - expectedOutAquif: str = ( "Invalid Geos log file. Please check that your log " + - "did not crash and contains aquifers." 
) - self.assertEqual( capturedOutAquif.getvalue().strip(), expectedOutAquif ) - - with self.assertRaises( AssertionError ): - objFlow = GeosLogReaderFlow( # noqa: F841 - pathFlowSim, conversionFactors ) # noqa: F841 - - capturedOutConv = io.StringIO() - with contextlib.redirect_stdout( capturedOutConv ): - objConv = GeosLogReaderConvergence( # noqa: F841 - pathFlowSim, conversionFactors ) - expectedOutConv: str = ( "Invalid Geos log file. Please check that your log " + "did not crash." ) - self.assertEqual( capturedOutConv.getvalue().strip(), expectedOutConv ) - - -if __name__ == "__main__": - unittest.main() diff --git a/geos-pv/pyproject.toml b/geos-pv/pyproject.toml index 96e7f14b..22d9675e 100644 --- a/geos-pv/pyproject.toml +++ b/geos-pv/pyproject.toml @@ -24,5 +24,13 @@ dynamic = ["dependencies"] # dependency to use if install together with paraview paraview = ["paraview"] -[project.scripts] - +[tool.pytest.ini_options] +addopts = "--import-mode=importlib" +console_output_style = "count" +pythonpath = [".", "src"] +python_classes = "Test" +python_files = "test*.py" +python_functions = "test*" +testpaths = ["tests"] +norecursedirs = "bin" +filterwarnings = [] \ No newline at end of file diff --git a/geos-pv/tests/testsFunctionsFigure2DGenerator.py b/geos-pv/tests/testsFunctionsFigure2DGenerator.py deleted file mode 100644 index 119b5630..00000000 --- a/geos-pv/tests/testsFunctionsFigure2DGenerator.py +++ /dev/null @@ -1,184 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. 
-# SPDX-FileContributor: Alexandre Benedicto -# ruff: noqa: E402 # disable Module level import not at top of file -import os -import sys -import unittest - -from typing_extensions import Self - -dir_path = os.path.dirname( os.path.realpath( __file__ ) ) -parent_dir_path = os.path.join( os.path.dirname( dir_path ), "src" ) -if parent_dir_path not in sys.path: - sys.path.append( parent_dir_path ) - -from geos_posp.visu.pythonViewUtils import functionsFigure2DGenerator as utils - - -class TestsFunctionsFigure2DGenerator( unittest.TestCase ): - - def test_associatePropertyToAxeType( self: Self ) -> None: - """Test of associatePropertyToAxeType function.""" - example: list[ str ] = [ - "WellControls1__BHP__Pa__Source1", - "WellControls1__TotalMassRate__kg/s__Source1", - "WellControls1__TotalSurfaceVolumetricRate__m3/s__Source1", - "WellControls1__SurfaceVolumetricRateCO2__m3/s__Source1", - "WellControls1__SurfaceVolumetricRateWater__m3/s__Source1", - "WellControls2__BHP__Pa__Source1", - "WellControls2__TotalMassRate__kg/s__Source1", - "WellControls2__TotalSurfaceVolumetricRate__m3/s__Source1", - "WellControls2__SurfaceVolumetricRateCO2__m3/s__Source1", - "WellControls2__SurfaceVolumetricRateWater__m3/s__Source1", - "WellControls3__BHP__Pa__Source1", - "WellControls3__TotalMassRate__tons/day__Source1", - "WellControls3__TotalSurfaceVolumetricRate__bbl/day__Source1", - "WellControls3__SurfaceVolumetricRateCO2__bbl/day__Source1", - "WellControls3__SurfaceVolumetricRateWater__bbl/day__Source1", - "Mean__BHP__Pa__Source1", - "Mean__TotalMassRate__tons/day__Source1", - "Mean_TotalVolumetricRate__bbl/day__Source1", - "Mean__SurfaceVolumetricRateCO2__bbl/day__Source1", - "Mean__SurfaceVolumetricRateWater__bbl/day__Source1", - ] - expected: dict[ str, list[ str ] ] = { - "BHP (Pa)": [ - "WellControls1__BHP__Pa__Source1", - "WellControls2__BHP__Pa__Source1", - "WellControls3__BHP__Pa__Source1", - "Mean__BHP__Pa__Source1", - ], - "MassRate (kg/s)": [ - 
"WellControls1__TotalMassRate__kg/s__Source1", - "WellControls2__TotalMassRate__kg/s__Source1", - ], - "VolumetricRate (m3/s)": [ - "WellControls1__TotalSurfaceVolumetricRate__m3/s__Source1", - "WellControls1__SurfaceVolumetricRateCO2__m3/s__Source1", - "WellControls1__SurfaceVolumetricRateWater__m3/s__Source1", - "WellControls2__TotalSurfaceVolumetricRate__m3/s__Source1", - "WellControls2__SurfaceVolumetricRateCO2__m3/s__Source1", - "WellControls2__SurfaceVolumetricRateWater__m3/s__Source1", - ], - "MassRate (tons/day)": [ - "WellControls3__TotalMassRate__tons/day__Source1", - "Mean__TotalMassRate__tons/day__Source1", - ], - "VolumetricRate (bbl/day)": [ - "WellControls3__TotalSurfaceVolumetricRate__bbl/day__Source1", - "WellControls3__SurfaceVolumetricRateCO2__bbl/day__Source1", - "WellControls3__SurfaceVolumetricRateWater__bbl/day__Source1", - "Mean_TotalVolumetricRate__bbl/day__Source1", - "Mean__SurfaceVolumetricRateCO2__bbl/day__Source1", - "Mean__SurfaceVolumetricRateWater__bbl/day__Source1", - ], - } - obtained: dict[ str, list[ str ] ] = utils.associatePropertyToAxeType( example ) - self.assertEqual( expected, obtained ) - - def test_propertiesPerIdentifier( self: Self ) -> None: - """Test of propertiesPerIdentifier function.""" - propertyNames: list[ str ] = [ - "WellControls1__BHP__Pa__Source1", - "WellControls1__TotalMassRate__kg/s__Source1", - "WellControls2__BHP__Pa__Source1", - "WellControls2__TotalMassRate__kg/s__Source1", - ] - expected: dict[ str, list[ str ] ] = { - "WellControls1": [ - "WellControls1__BHP__Pa__Source1", - "WellControls1__TotalMassRate__kg/s__Source1", - ], - "WellControls2": [ - "WellControls2__BHP__Pa__Source1", - "WellControls2__TotalMassRate__kg/s__Source1", - ], - } - obtained = utils.propertiesPerIdentifier( propertyNames ) - self.assertEqual( expected, obtained ) - - def test_associationIdentifers( self: Self ) -> None: - """Test of associationIdentifiers function.""" - propertyNames: list[ str ] = [ - 
"WellControls1__BHP__Pa__Source1", - "WellControls1__TotalMassRate__kg/s__Source1", - "WellControls1__TotalSurfaceVolumetricRate__m3/s__Source1", - "WellControls1__SurfaceVolumetricRateCO2__m3/s__Source1", - "WellControls1__SurfaceVolumetricRateWater__m3/s__Source1", - "WellControls2__BHP__Pa__Source1", - "WellControls2__TotalMassRate__kg/s__Source1", - "WellControls2__TotalSurfaceVolumetricRate__m3/s__Source1", - "WellControls2__SurfaceVolumetricRateCO2__m3/s__Source1", - "WellControls2__SurfaceVolumetricRateWater__m3/s__Source1", - "WellControls3__BHP__Pa__Source1", - "WellControls3__TotalMassRate__tons/day__Source1", - "WellControls3__TotalSurfaceVolumetricRate__bbl/day__Source1", - "WellControls3__SurfaceVolumetricRateCO2__bbl/day__Source1", - "WellControls3__SurfaceVolumetricRateWater__bbl/day__Source1", - "Mean__BHP__Pa__Source1", - "Mean__TotalMassRate__tons/day__Source1", - "Mean__TotalSurfaceVolumetricRate__bbl/day__Source1", - "Mean__SurfaceVolumetricRateCO2__bbl/day__Source1", - "Mean__SurfaceVolumetricRateWater__bbl/day__Source1", - ] - expected: dict[ str, dict[ str, list[ str ] ] ] = { - "WellControls1": { - "BHP (Pa)": [ - "WellControls1__BHP__Pa__Source1", - ], - "MassRate (kg/s)": [ - "WellControls1__TotalMassRate__kg/s__Source1", - ], - "VolumetricRate (m3/s)": [ - "WellControls1__TotalSurfaceVolumetricRate__m3/s__Source1", - "WellControls1__SurfaceVolumetricRateCO2__m3/s__Source1", - "WellControls1__SurfaceVolumetricRateWater__m3/s__Source1", - ], - }, - "WellControls2": { - "BHP (Pa)": [ - "WellControls2__BHP__Pa__Source1", - ], - "MassRate (kg/s)": [ - "WellControls2__TotalMassRate__kg/s__Source1", - ], - "VolumetricRate (m3/s)": [ - "WellControls2__TotalSurfaceVolumetricRate__m3/s__Source1", - "WellControls2__SurfaceVolumetricRateCO2__m3/s__Source1", - "WellControls2__SurfaceVolumetricRateWater__m3/s__Source1", - ], - }, - "WellControls3": { - "BHP (Pa)": [ - "WellControls3__BHP__Pa__Source1", - ], - "MassRate (tons/day)": [ - 
"WellControls3__TotalMassRate__tons/day__Source1", - ], - "VolumetricRate (bbl/day)": [ - "WellControls3__TotalSurfaceVolumetricRate__bbl/day__Source1", - "WellControls3__SurfaceVolumetricRateCO2__bbl/day__Source1", - "WellControls3__SurfaceVolumetricRateWater__bbl/day__Source1", - ], - }, - "Mean": { - "BHP (Pa)": [ - "Mean__BHP__Pa__Source1", - ], - "MassRate (tons/day)": [ - "Mean__TotalMassRate__tons/day__Source1", - ], - "VolumetricRate (bbl/day)": [ - "Mean__TotalSurfaceVolumetricRate__bbl/day__Source1", - "Mean__SurfaceVolumetricRateCO2__bbl/day__Source1", - "Mean__SurfaceVolumetricRateWater__bbl/day__Source1", - ], - }, - } - obtained = utils.associationIdentifiers( propertyNames ) - self.assertEqual( expected, obtained ) - - -if __name__ == "__main__": - unittest.main() From 32463d25815f64f67af167ef566ca007cb95bfb1 Mon Sep 17 00:00:00 2001 From: mlemayTTE Date: Mon, 7 Apr 2025 14:12:53 +0200 Subject: [PATCH 12/20] fix doc issues --- docs/geos_pv_docs/PVplugins.rst | 2 +- docs/geos_pv_docs/utils.rst | 2 +- docs/index.rst | 6 ++++-- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/docs/geos_pv_docs/PVplugins.rst b/docs/geos_pv_docs/PVplugins.rst index a46fd982..2ce4ca50 100644 --- a/docs/geos_pv_docs/PVplugins.rst +++ b/docs/geos_pv_docs/PVplugins.rst @@ -4,7 +4,7 @@ Paraview plugins This package contains Paraview plugins that can be loaded in Paraview. PVplugins.PVGeosLogReader module ---------------------------------------------- +---------------------------------- .. automodule:: PVplugins.PVGeosLogReader :members: diff --git a/docs/geos_pv_docs/utils.rst b/docs/geos_pv_docs/utils.rst index e0a3982a..3a868de5 100644 --- a/docs/geos_pv_docs/utils.rst +++ b/docs/geos_pv_docs/utils.rst @@ -13,7 +13,7 @@ geos.pv.utils.checkboxFunction module :show-inheritance: geos.pv.utils.DisplayOrganizationParaview module ---------------------------------------------- +------------------------------------------------- .. 
automodule:: geos.pv.utils.DisplayOrganizationParaview :members: diff --git a/docs/index.rst b/docs/index.rst index f05ca861..02cdb4a6 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -80,11 +80,13 @@ Packages geos-ats + geos-geomechanics + geos-mesh geos-posp - - geos-geomechanics + + geos-pv geos-timehistory From 7d5fe5d039b3ee9393515487aae56e4cf86e0f56 Mon Sep 17 00:00:00 2001 From: mlemayTTE Date: Mon, 7 Apr 2025 14:26:21 +0200 Subject: [PATCH 13/20] fix ci geos-geomecanics and linting --- .github/workflows/typing-check.yml | 2 +- geos-posp/src/PVplugins/PVAttributeMapping.py | 2 +- geos-posp/src/PVplugins/PVCreateConstantAttributePerRegion.py | 2 +- geos-posp/src/PVplugins/PVExtractMergeBlocksVolume.py | 2 +- geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurface.py | 2 +- .../src/PVplugins/PVExtractMergeBlocksVolumeSurfaceWell.py | 2 +- geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeWell.py | 2 +- geos-posp/src/PVplugins/PVGeomechanicsAnalysis.py | 2 +- geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolume.py | 2 +- geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurface.py | 2 +- .../src/PVplugins/PVGeomechanicsWorkflowVolumeSurfaceWell.py | 2 +- geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeWell.py | 2 +- geos-posp/src/PVplugins/PVMergeBlocksEnhanced.py | 2 +- geos-posp/src/PVplugins/PVMohrCirclePlot.py | 2 +- geos-posp/src/PVplugins/PVSurfaceGeomechanics.py | 2 +- geos-posp/src/PVplugins/PVTransferAttributesVolumeSurface.py | 2 +- 16 files changed, 16 insertions(+), 16 deletions(-) diff --git a/.github/workflows/typing-check.yml b/.github/workflows/typing-check.yml index 4de56171..55340473 100644 --- a/.github/workflows/typing-check.yml +++ b/.github/workflows/typing-check.yml @@ -16,7 +16,7 @@ jobs: max-parallel: 3 matrix: # add packages to check typing - package-name: ["hdf5-geomechanics", "geos-posp", "geos-timehistory", "geos-utils", "geos-xml-tools", "hdf5-wrapper"] + package-name: ["geos-geomechanics", "geos-posp", 
"geos-timehistory", "geos-utils", "geos-xml-tools", "hdf5-wrapper"] steps: - uses: actions/checkout@v3 diff --git a/geos-posp/src/PVplugins/PVAttributeMapping.py b/geos-posp/src/PVplugins/PVAttributeMapping.py index edb715ed..989d6394 100644 --- a/geos-posp/src/PVplugins/PVAttributeMapping.py +++ b/geos-posp/src/PVplugins/PVAttributeMapping.py @@ -13,7 +13,7 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -import PVplugins # noqa: F401 +import PVplugins # noqa: F401 from geos.utils.Logger import Logger, getLogger from geos_posp.filters.AttributeMappingFromCellCoords import ( diff --git a/geos-posp/src/PVplugins/PVCreateConstantAttributePerRegion.py b/geos-posp/src/PVplugins/PVCreateConstantAttributePerRegion.py index 23007575..7aba2271 100644 --- a/geos-posp/src/PVplugins/PVCreateConstantAttributePerRegion.py +++ b/geos-posp/src/PVplugins/PVCreateConstantAttributePerRegion.py @@ -15,7 +15,7 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -import PVplugins # noqa: F401 +import PVplugins # noqa: F401 import vtkmodules.util.numpy_support as vnp from geos.utils.Logger import Logger, getLogger diff --git a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolume.py b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolume.py index 3295d4c5..cd0814f0 100644 --- a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolume.py +++ b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolume.py @@ -15,7 +15,7 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -import PVplugins # noqa: F401 +import PVplugins # noqa: F401 from geos.utils.GeosOutputsConstants import ( GeosMeshOutputsEnum, diff --git a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurface.py b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurface.py index e9927b52..6233809b 100644 --- a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurface.py +++ b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurface.py @@ -16,7 +16,7 @@ if parent_dir_path not in 
sys.path: sys.path.append( parent_dir_path ) -import PVplugins # noqa: F401 +import PVplugins # noqa: F401 from geos.utils.GeosOutputsConstants import ( GeosMeshOutputsEnum, diff --git a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurfaceWell.py b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurfaceWell.py index 2dd49639..3de64962 100644 --- a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurfaceWell.py +++ b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurfaceWell.py @@ -16,7 +16,7 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -import PVplugins # noqa: F401 +import PVplugins # noqa: F401 from geos.utils.GeosOutputsConstants import ( GeosMeshOutputsEnum, diff --git a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeWell.py b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeWell.py index 4210128e..2519d41c 100644 --- a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeWell.py +++ b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeWell.py @@ -19,7 +19,7 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -import PVplugins # noqa: F401 +import PVplugins # noqa: F401 from geos.utils.GeosOutputsConstants import ( GeosMeshOutputsEnum, diff --git a/geos-posp/src/PVplugins/PVGeomechanicsAnalysis.py b/geos-posp/src/PVplugins/PVGeomechanicsAnalysis.py index fceb51f0..d1e44c26 100644 --- a/geos-posp/src/PVplugins/PVGeomechanicsAnalysis.py +++ b/geos-posp/src/PVplugins/PVGeomechanicsAnalysis.py @@ -19,7 +19,7 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -import PVplugins # noqa: F401 +import PVplugins # noqa: F401 from geos.utils.Logger import Logger, getLogger from geos.utils.PhysicalConstants import ( diff --git a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolume.py b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolume.py index f5d44fef..c50e5c22 100644 --- a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolume.py +++ 
b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolume.py @@ -16,7 +16,7 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -import PVplugins # noqa: F401 +import PVplugins # noqa: F401 from geos.utils.Logger import Logger, getLogger from geos.utils.PhysicalConstants import ( diff --git a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurface.py b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurface.py index 14b7fb24..0bfae2ee 100644 --- a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurface.py +++ b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurface.py @@ -16,7 +16,7 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -import PVplugins # noqa: F401 +import PVplugins # noqa: F401 from paraview.util.vtkAlgorithm import ( # type: ignore[import-not-found] VTKPythonAlgorithmBase, smdomain, smhint, smproperty, smproxy, diff --git a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurfaceWell.py b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurfaceWell.py index 10c7bc73..fff90856 100644 --- a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurfaceWell.py +++ b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurfaceWell.py @@ -16,7 +16,7 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -import PVplugins # noqa: F401 +import PVplugins # noqa: F401 from paraview.util.vtkAlgorithm import ( # type: ignore[import-not-found] VTKPythonAlgorithmBase, smdomain, smhint, smproperty, smproxy, diff --git a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeWell.py b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeWell.py index 9733766c..210fd933 100644 --- a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeWell.py +++ b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeWell.py @@ -16,7 +16,7 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -import PVplugins # noqa: F401 +import PVplugins # noqa: F401 from paraview.util.vtkAlgorithm 
import ( # type: ignore[import-not-found] VTKPythonAlgorithmBase, smdomain, smhint, smproperty, smproxy, diff --git a/geos-posp/src/PVplugins/PVMergeBlocksEnhanced.py b/geos-posp/src/PVplugins/PVMergeBlocksEnhanced.py index b406a092..1bdc9666 100644 --- a/geos-posp/src/PVplugins/PVMergeBlocksEnhanced.py +++ b/geos-posp/src/PVplugins/PVMergeBlocksEnhanced.py @@ -13,7 +13,7 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -import PVplugins # noqa: F401 +import PVplugins # noqa: F401 from geos.utils.Logger import Logger, getLogger from geos_posp.processing.vtkUtils import mergeBlocks diff --git a/geos-posp/src/PVplugins/PVMohrCirclePlot.py b/geos-posp/src/PVplugins/PVMohrCirclePlot.py index 8ddc690c..be2b5e7d 100644 --- a/geos-posp/src/PVplugins/PVMohrCirclePlot.py +++ b/geos-posp/src/PVplugins/PVMohrCirclePlot.py @@ -27,7 +27,7 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -import PVplugins # noqa: F401 +import PVplugins # noqa: F401 import geos_posp.visu.mohrCircles.functionsMohrCircle as mcf import geos_posp.visu.PVUtils.paraviewTreatments as pvt diff --git a/geos-posp/src/PVplugins/PVSurfaceGeomechanics.py b/geos-posp/src/PVplugins/PVSurfaceGeomechanics.py index f41d0034..4857c477 100644 --- a/geos-posp/src/PVplugins/PVSurfaceGeomechanics.py +++ b/geos-posp/src/PVplugins/PVSurfaceGeomechanics.py @@ -13,7 +13,7 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -import PVplugins # noqa: F401 +import PVplugins # noqa: F401 from geos.utils.Logger import Logger, getLogger from geos.utils.PhysicalConstants import ( diff --git a/geos-posp/src/PVplugins/PVTransferAttributesVolumeSurface.py b/geos-posp/src/PVplugins/PVTransferAttributesVolumeSurface.py index 2b38d7be..046ba939 100644 --- a/geos-posp/src/PVplugins/PVTransferAttributesVolumeSurface.py +++ b/geos-posp/src/PVplugins/PVTransferAttributesVolumeSurface.py @@ -12,7 +12,7 @@ if parent_dir_path not in sys.path: sys.path.append( 
parent_dir_path ) -import PVplugins # noqa: F401 +import PVplugins # noqa: F401 from geos.utils.Logger import Logger, getLogger from geos_posp.filters.TransferAttributesVolumeSurface import ( From 5eec4860311b12c22a9734afb5920c31bc0583fa Mon Sep 17 00:00:00 2001 From: mlemayTTE Date: Wed, 9 Apr 2025 17:47:09 +0200 Subject: [PATCH 14/20] update geos-pv doc --- docs/geos-pv.rst | 2 +- docs/geos_posp_docs/modules.rst | 2 -- docs/geos_posp_docs/readers.rst | 36 ------------------- docs/geos_posp_docs/visu.pythonViewUtils.rst | 23 ------------ docs/geos_posp_docs/visu.rst | 4 +-- docs/geos_pv_docs/PVplugins.rst | 12 ------- docs/geos_pv_docs/geosLogReaderUtils.rst | 20 +++++------ docs/geos_pv_docs/home.rst | 9 +++-- docs/geos_pv_docs/readers.rst | 10 ++++++ geos-pv/requirements.txt | 1 - .../{PVplugins => readers}/PVGeosLogReader.py | 0 11 files changed, 26 insertions(+), 93 deletions(-) delete mode 100644 docs/geos_posp_docs/readers.rst delete mode 100644 docs/geos_posp_docs/visu.pythonViewUtils.rst delete mode 100644 docs/geos_pv_docs/PVplugins.rst create mode 100644 docs/geos_pv_docs/readers.rst rename geos-pv/src/{PVplugins => readers}/PVGeosLogReader.py (100%) diff --git a/docs/geos-pv.rst b/docs/geos-pv.rst index 4fbc70dd..fc153b70 100644 --- a/docs/geos-pv.rst +++ b/docs/geos-pv.rst @@ -9,4 +9,4 @@ GEOS Paraview plugins ./geos_pv_docs/modules.rst - ./geos_pv_docs/PVplugins.rst \ No newline at end of file + ./geos_pv_docs/readers.rst \ No newline at end of file diff --git a/docs/geos_posp_docs/modules.rst b/docs/geos_posp_docs/modules.rst index 476a7c57..99fcad60 100644 --- a/docs/geos_posp_docs/modules.rst +++ b/docs/geos_posp_docs/modules.rst @@ -6,8 +6,6 @@ Processing filters - readers - processing pyvistaTools diff --git a/docs/geos_posp_docs/readers.rst b/docs/geos_posp_docs/readers.rst deleted file mode 100644 index 588a5910..00000000 --- a/docs/geos_posp_docs/readers.rst +++ /dev/null @@ -1,36 +0,0 @@ -vtk Readers -=========== - -This package defines vtk 
readers that allows to load Geos output files. - -geos_posp.readers.GeosLogReaderAquifers module --------------------------------------------------- - -.. automodule:: geos_posp.readers.GeosLogReaderAquifers - :members: - :undoc-members: - :show-inheritance: - -geos_posp.readers.GeosLogReaderConvergence module ------------------------------------------------------ - -.. automodule:: geos_posp.readers.GeosLogReaderConvergence - :members: - :undoc-members: - :show-inheritance: - -geos_posp.readers.GeosLogReaderFlow module ----------------------------------------------- - -.. automodule:: geos_posp.readers.GeosLogReaderFlow - :members: - :undoc-members: - :show-inheritance: - -geos_posp.readers.GeosLogReaderWells module ------------------------------------------------ - -.. automodule:: geos_posp.readers.GeosLogReaderWells - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/geos_posp_docs/visu.pythonViewUtils.rst b/docs/geos_posp_docs/visu.pythonViewUtils.rst deleted file mode 100644 index 68047cfb..00000000 --- a/docs/geos_posp_docs/visu.pythonViewUtils.rst +++ /dev/null @@ -1,23 +0,0 @@ -PythonViewUtils Package -============================ - -This package includes utilities to display cross-plot using the Python View from Paraview. - - -geos_posp.visu.pythonViewUtils.Figure2DGenerator module ----------------------------------------------------------- - -.. automodule:: geos_posp.visu.pythonViewUtils.Figure2DGenerator - :members: - :undoc-members: - :show-inheritance: - -geos_posp.visu.pythonViewUtils.functionsFigure2DGenerator module -------------------------------------------------------------------- - -.. 
automodule:: geos_posp.visu.pythonViewUtils.functionsFigure2DGenerator - :members: - :undoc-members: - :show-inheritance: - - diff --git a/docs/geos_posp_docs/visu.rst b/docs/geos_posp_docs/visu.rst index c6e782b7..66ec9bf8 100644 --- a/docs/geos_posp_docs/visu.rst +++ b/docs/geos_posp_docs/visu.rst @@ -9,6 +9,4 @@ This package includes visualization tools dedicated to Paraview software. visu.PVUtils - visu.mohrCircles - - visu.pythonViewUtils \ No newline at end of file + visu.mohrCircles \ No newline at end of file diff --git a/docs/geos_pv_docs/PVplugins.rst b/docs/geos_pv_docs/PVplugins.rst deleted file mode 100644 index 2ce4ca50..00000000 --- a/docs/geos_pv_docs/PVplugins.rst +++ /dev/null @@ -1,12 +0,0 @@ -Paraview plugins -================ - -This package contains Paraview plugins that can be loaded in Paraview. - -PVplugins.PVGeosLogReader module ----------------------------------- - -.. automodule:: PVplugins.PVGeosLogReader - :members: - :undoc-members: - :show-inheritance: \ No newline at end of file diff --git a/docs/geos_pv_docs/geosLogReaderUtils.rst b/docs/geos_pv_docs/geosLogReaderUtils.rst index 181d8c34..1b2e2018 100644 --- a/docs/geos_pv_docs/geosLogReaderUtils.rst +++ b/docs/geos_pv_docs/geosLogReaderUtils.rst @@ -4,42 +4,42 @@ GeosLogReaderUtils functions This package define functions dedicated to the GeosLogReader. -geos.pv.GeosLogReaderUtils.GeosLogReaderAquifers module +geos.pv.geosLogReaderUtils.GeosLogReaderAquifers module ------------------------------------------------------------------- -.. automodule:: geos.pv.GeosLogReaderUtils.GeosLogReaderAquifers +.. automodule:: geos.pv.geosLogReaderUtils.GeosLogReaderAquifers :members: :undoc-members: :show-inheritance: -geos.pv.GeosLogReaderUtils.GeosLogReaderConvergence module +geos.pv.geosLogReaderUtils.geosLogReaderConvergence module --------------------------------------------------------------------- -.. automodule:: geos.pv.GeosLogReaderUtils.GeosLogReaderConvergence +.. 
automodule:: geos.pv.geosLogReaderUtils.GeosLogReaderConvergence :members: :undoc-members: :show-inheritance: -geos.pv.GeosLogReaderUtils.GeosLogReaderFlow module +geos.pv.geosLogReaderUtils.GeosLogReaderFlow module --------------------------------------------------------------- -.. automodule:: geos.pv.GeosLogReaderUtils.GeosLogReaderFlow +.. automodule:: geos.pv.geosLogReaderUtils.GeosLogReaderFlow :members: :undoc-members: :show-inheritance: -geos.pv.GeosLogReaderUtils.GeosLogReaderFunctions module +geos.pv.geosLogReaderUtils.GeosLogReaderFunctions module -------------------------------------------------------------------- -.. automodule:: geos.pv.GeosLogReaderUtils.GeosLogReaderFunctions +.. automodule:: geos.pv.geosLogReaderUtils.geosLogReaderFunctions :members: :undoc-members: :show-inheritance: -geos.pv.GeosLogReaderUtils.GeosLogReaderWells module +geos.pv.geosLogReaderUtils.GeosLogReaderWells module -------------------------------------------------------------------- -.. automodule:: geos.pv.GeosLogReaderUtils.GeosLogReaderWells +.. automodule:: geos.pv.geosLogReaderUtils.GeosLogReaderWells :members: :undoc-members: :show-inheritance: \ No newline at end of file diff --git a/docs/geos_pv_docs/home.rst b/docs/geos_pv_docs/home.rst index 1e42ae41..68c9fff1 100644 --- a/docs/geos_pv_docs/home.rst +++ b/docs/geos_pv_docs/home.rst @@ -5,11 +5,10 @@ Home It includes: -* a reader able to parse the GEOS output log (before commit version #9365098) to collect data and display them as tables; -* tools to clean and check GEOS input mesh; -* tools to clean GEOS output mesh; -* tools to compute additional geomechanical properties from GEOS outputs; -* tools to display Mohr's circles at a given time step and the evolution through time from GEOS outputs. 
+* Paraview readers allowing to load data; +* generic tools to processes meshes; +* GEOS pre-processing tools to clean and check GEOS input mesh; +* GEOS post-processing tools to clean GEOS output mesh, compute additional properties, or create specific plots such as Mohr's circle plot. The packages can be loaded into Paraview using the Plugin Manager from `Tools > Plugin Manager`. On success, you will see the selected plugin in the `Filters`` menu (see `Paraview documentation `. diff --git a/docs/geos_pv_docs/readers.rst b/docs/geos_pv_docs/readers.rst new file mode 100644 index 00000000..875bffbe --- /dev/null +++ b/docs/geos_pv_docs/readers.rst @@ -0,0 +1,10 @@ +Paraview readers +================ + +readers.PVGeosLogReader module +---------------------------------- + +.. automodule:: readers.PVGeosLogReader + :members: + :undoc-members: + :show-inheritance: \ No newline at end of file diff --git a/geos-pv/requirements.txt b/geos-pv/requirements.txt index e4726a3c..edb1046c 100644 --- a/geos-pv/requirements.txt +++ b/geos-pv/requirements.txt @@ -1,5 +1,4 @@ geos-geomechanics geos-mesh geos-posp -geos-prep geos-utils \ No newline at end of file diff --git a/geos-pv/src/PVplugins/PVGeosLogReader.py b/geos-pv/src/readers/PVGeosLogReader.py similarity index 100% rename from geos-pv/src/PVplugins/PVGeosLogReader.py rename to geos-pv/src/readers/PVGeosLogReader.py From 99686dd6b711c6d8483af84aeb4fcdd45b12759a Mon Sep 17 00:00:00 2001 From: mlemayTTE Date: Fri, 11 Apr 2025 09:32:16 +0200 Subject: [PATCH 15/20] Add contribution explanations for geos-pv --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index e49704fa..53c87ddf 100644 --- a/README.md +++ b/README.md @@ -170,6 +170,8 @@ dependencies = [ ] ``` +geos-pv dependencies are managed using a requirements.txt file where all external and internal dependencies are present. It ensures that internal dependency paths are correctly set when plugins are manually loaded into Paraview. 
+ Release ------- From 1759dc556efb1748e05ee380bd2a8feebde5335b Mon Sep 17 00:00:00 2001 From: mlemayTTE Date: Mon, 14 Apr 2025 13:22:55 +0200 Subject: [PATCH 16/20] Fix doc ci --- docs/geos_pv_docs/readers.rst | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/docs/geos_pv_docs/readers.rst b/docs/geos_pv_docs/readers.rst index 875bffbe..c7fb7100 100644 --- a/docs/geos_pv_docs/readers.rst +++ b/docs/geos_pv_docs/readers.rst @@ -4,7 +4,4 @@ Paraview readers readers.PVGeosLogReader module ---------------------------------- -.. automodule:: readers.PVGeosLogReader - :members: - :undoc-members: - :show-inheritance: \ No newline at end of file +.. automodule:: readers.PVGeosLogReader \ No newline at end of file From 2c3945529529e647a1d57efa4fd015a04755a849 Mon Sep 17 00:00:00 2001 From: alexbenedicto Date: Mon, 14 Apr 2025 23:32:19 -0700 Subject: [PATCH 17/20] Update pyproject and requirements --- docs/requirements.txt | 12 ++++---- geos-geomechanics/pyproject.toml | 35 +++++++++++++--------- geos-posp/pyproject.toml | 9 ++---- geos-pv/pyproject.toml | 51 +++++++++++++++++++++++++++----- geos-utils/pyproject.toml | 39 +++++++++++++++--------- 5 files changed, 98 insertions(+), 48 deletions(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index 773420cf..63765007 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -6,14 +6,14 @@ sphinx-design # Running CLI programs and capture outputs sphinxcontrib-programoutput>=0.17 # Installing all package requirements to be able to load all the modules and run the help. 
-vtk >= 9.1 +vtk >= 9.3 networkx >= 2.4 tqdm -numpy -pandas -typing_extensions -matplotlib>=3.9.4 +numpy >= 1.26 +pandas >= 2.2 +typing_extensions >= 4.12 +matplotlib >= 3.9.4 h5py -lxml>=4.5.0 +lxml >= 4.5.0 parameterized pyvista diff --git a/geos-geomechanics/pyproject.toml b/geos-geomechanics/pyproject.toml index d49048c3..7fc2deb8 100644 --- a/geos-geomechanics/pyproject.toml +++ b/geos-geomechanics/pyproject.toml @@ -7,7 +7,7 @@ include-package-data = true [tool.setuptools.packages.find] where = ["src"] -include = ["geos_geomechanics*"] +include = ["geos.geomechanics*"] exclude = ['tests*'] [project] @@ -19,36 +19,44 @@ maintainers = [{name = "Martin Lemay", email = "martin.lemay@external.totalenerg license = {text = "Apache-2.0"} classifiers = [ "Development Status :: 4 - Beta", - "Programming Language :: Python" + "Programming Language :: Python", + "License :: OSI Approved :: Apache Software License ", + "Natural Language :: English", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3 :: Only", ] + +requires-python = ">= 3.10" + dependencies=[ + "geos-utils @ file:./geos-utils", "vtk >= 9.3", - "numpy >= 1.26", "pandas >= 2.2", - "typing_extensions >= 4.12", - "geos-utils @ file:./geos-utils", ] -requires-python = ">= 3.10" + +[project.urls] +Homepage = "https://github.com/GEOS-DEV/geosPythonPackages" +Documentation = "https://geosx-geosx.readthedocs-hosted.com/projects/geosx-geospythonpackages/en/latest/" +Repository = "https://github.com/GEOS-DEV/geosPythonPackages.git" +"Bug Tracker" = "https://github.com/GEOS-DEV/geosPythonPackages/issues" [project.optional-dependencies] build = [ "build ~= 1.2" ] dev = [ - "yapf", - "mypy", + "mypy", + "yapf", ] test = [ "pytest", + "pytest-cov" ] -[project.scripts] - - [tool.pytest.ini_options] addopts = "--import-mode=importlib" console_output_style = "count" -pythonpath = ["src"] +pythonpath = [".", "src"] python_classes = "Test" python_files = "test*.py" python_functions = "test*" @@ 
-56,7 +64,6 @@ testpaths = ["tests"] norecursedirs = "bin" filterwarnings = [] - [tool.coverage.run] branch = true -source = ["src/geos"] \ No newline at end of file +source = ["src/geos/geomechanics"] \ No newline at end of file diff --git a/geos-posp/pyproject.toml b/geos-posp/pyproject.toml index d9e9d26a..e2c01ec5 100644 --- a/geos-posp/pyproject.toml +++ b/geos-posp/pyproject.toml @@ -33,15 +33,9 @@ keywords = [ requires-python = ">= 3.10" dependencies = [ - "vtk >= 9.3", - "numpy >= 1.26", - "pandas >= 2.2", - "typing_extensions >= 4.12", - "geos-utils @ file:./geos-utils", "geos-geomechanics @ file:./geos-geomechanics", ] - [project.urls] Homepage = "https://github.com/GEOS-DEV/geosPythonPackages" Documentation = "https://geosx-geosx.readthedocs-hosted.com/projects/geosx-geospythonpackages/en/latest/" @@ -49,6 +43,9 @@ Repository = "https://github.com/GEOS-DEV/geosPythonPackages.git" "Bug Tracker" = "https://github.com/GEOS-DEV/geosPythonPackages/issues" [project.optional-dependencies] +build = [ + "build ~= 1.2" +] dev = [ "mypy", "yapf", diff --git a/geos-pv/pyproject.toml b/geos-pv/pyproject.toml index 22d9675e..ff7b5789 100644 --- a/geos-pv/pyproject.toml +++ b/geos-pv/pyproject.toml @@ -2,27 +2,58 @@ requires = ["setuptools>=61.2"] build-backend = "setuptools.build_meta" +[tool.setuptools] +include-package-data = true + +[tool.setuptools.packages.find] +where = ["src"] +include = ["geos.pv*"] +exclude = ['tests*'] + [project] name = "geos-pv" version = "0.1.0" description = "geos-pv is a Python package that gathers Paraview plugins and dedicated utils to process and visualize GEOS inputs and outputs." 
authors = [{name = "GEOS Contributors" }] -maintainers = [ - {name = "Martin Lemay", email = "martin.lemay@external.totalenergies.com"} -] +maintainers = [{name = "Martin Lemay", email = "martin.lemay@external.totalenergies.com"}] license = {text = "Apache-2.0"} classifiers = [ - "Development Status :: 4 - Beta", - "Programming Language :: Python" + "Development Status :: 4 - Beta", + "Programming Language :: Python", + "License :: OSI Approved :: Apache Software License ", + "Natural Language :: English", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3 :: Only", ] -requires-python = ">=3.10" +requires-python = ">= 3.10" + +dependencies=[ + "geos-utils @ file:./geos-utils", + "vtk >= 9.3", + "pandas >= 2.2", +] -dynamic = ["dependencies"] +[project.urls] +Homepage = "https://github.com/GEOS-DEV/geosPythonPackages" +Documentation = "https://geosx-geosx.readthedocs-hosted.com/projects/geosx-geospythonpackages/en/latest/" +Repository = "https://github.com/GEOS-DEV/geosPythonPackages.git" +"Bug Tracker" = "https://github.com/GEOS-DEV/geosPythonPackages/issues" [project.optional-dependencies] # dependency to use if install together with paraview paraview = ["paraview"] +build = [ + "build ~= 1.2" +] +dev = [ + "mypy", + "yapf", +] +test = [ + "pytest", + "pytest-cov" +] [tool.pytest.ini_options] addopts = "--import-mode=importlib" @@ -33,4 +64,8 @@ python_files = "test*.py" python_functions = "test*" testpaths = ["tests"] norecursedirs = "bin" -filterwarnings = [] \ No newline at end of file +filterwarnings = [] + +[tool.coverage.run] +branch = true +source = ["src"] \ No newline at end of file diff --git a/geos-utils/pyproject.toml b/geos-utils/pyproject.toml index b655fd74..49a28382 100644 --- a/geos-utils/pyproject.toml +++ b/geos-utils/pyproject.toml @@ -2,27 +2,43 @@ requires = ["setuptools>=61.2"] build-backend = "setuptools.build_meta" +[tool.setuptools] +include-package-data = true + +[tool.setuptools.packages.find] +where = 
["src"] +include = ["geos.utils*"] +exclude = ['tests*'] + [project] name = "geos-utils" version = "0.1.0" description = "geos-utils is a Python package that gathers utilities common to all GEOS python packages." authors = [{name = "GEOS Contributors" }] -maintainers = [ - {name = "Martin Lemay", email = "martin.lemay@external.totalenergies.com"} -] +maintainers = [{name = "Martin Lemay", email = "martin.lemay@external.totalenergies.com"}] license = {text = "Apache-2.0"} classifiers = [ - "Development Status :: 4 - Beta", - "Programming Language :: Python" + "Development Status :: 4 - Beta", + "Programming Language :: Python", + "License :: OSI Approved :: Apache Software License ", + "Natural Language :: English", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3 :: Only", ] -requires-python = ">=3.9" +requires-python = ">= 3.10" dependencies = [ - "typing_extensions", - "numpy", + "typing_extensions >= 4.12", + "numpy >= 1.26", ] +[project.urls] +Homepage = "https://github.com/GEOS-DEV/geosPythonPackages" +Documentation = "https://geosx-geosx.readthedocs-hosted.com/projects/geosx-geospythonpackages/en/latest/" +Repository = "https://github.com/GEOS-DEV/geosPythonPackages.git" +"Bug Tracker" = "https://github.com/GEOS-DEV/geosPythonPackages/issues" + [project.optional-dependencies] build = [ "build ~= 1.2" @@ -36,10 +52,6 @@ test = [ "pytest-cov" ] - -[project.scripts] - - [tool.pytest.ini_options] addopts = "--import-mode=importlib" console_output_style = "count" @@ -53,5 +65,4 @@ filterwarnings = [] [tool.coverage.run] branch = true -source = ["src/geos/utils"] - +source = ["src/geos/utils"] \ No newline at end of file From 625b45d8c8ff56c9d209563a9816374e53824329 Mon Sep 17 00:00:00 2001 From: mlemayTTE Date: Tue, 15 Apr 2025 10:22:50 +0200 Subject: [PATCH 18/20] Revert "Update pyproject and requirements" This reverts commit 2c3945529529e647a1d57efa4fd015a04755a849. 
--- docs/requirements.txt | 12 ++++---- geos-geomechanics/pyproject.toml | 35 +++++++++------------- geos-posp/pyproject.toml | 9 ++++-- geos-pv/pyproject.toml | 51 +++++--------------------------- geos-utils/pyproject.toml | 39 +++++++++--------------- 5 files changed, 48 insertions(+), 98 deletions(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index 63765007..773420cf 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -6,14 +6,14 @@ sphinx-design # Running CLI programs and capture outputs sphinxcontrib-programoutput>=0.17 # Installing all package requirements to be able to load all the modules and run the help. -vtk >= 9.3 +vtk >= 9.1 networkx >= 2.4 tqdm -numpy >= 1.26 -pandas >= 2.2 -typing_extensions >= 4.12 -matplotlib >= 3.9.4 +numpy +pandas +typing_extensions +matplotlib>=3.9.4 h5py -lxml >= 4.5.0 +lxml>=4.5.0 parameterized pyvista diff --git a/geos-geomechanics/pyproject.toml b/geos-geomechanics/pyproject.toml index 7fc2deb8..d49048c3 100644 --- a/geos-geomechanics/pyproject.toml +++ b/geos-geomechanics/pyproject.toml @@ -7,7 +7,7 @@ include-package-data = true [tool.setuptools.packages.find] where = ["src"] -include = ["geos.geomechanics*"] +include = ["geos_geomechanics*"] exclude = ['tests*'] [project] @@ -19,44 +19,36 @@ maintainers = [{name = "Martin Lemay", email = "martin.lemay@external.totalenerg license = {text = "Apache-2.0"} classifiers = [ "Development Status :: 4 - Beta", - "Programming Language :: Python", - "License :: OSI Approved :: Apache Software License ", - "Natural Language :: English", - "Operating System :: OS Independent", - "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python" ] - -requires-python = ">= 3.10" - dependencies=[ - "geos-utils @ file:./geos-utils", "vtk >= 9.3", + "numpy >= 1.26", "pandas >= 2.2", + "typing_extensions >= 4.12", + "geos-utils @ file:./geos-utils", ] - -[project.urls] -Homepage = "https://github.com/GEOS-DEV/geosPythonPackages" -Documentation = 
"https://geosx-geosx.readthedocs-hosted.com/projects/geosx-geospythonpackages/en/latest/" -Repository = "https://github.com/GEOS-DEV/geosPythonPackages.git" -"Bug Tracker" = "https://github.com/GEOS-DEV/geosPythonPackages/issues" +requires-python = ">= 3.10" [project.optional-dependencies] build = [ "build ~= 1.2" ] dev = [ - "mypy", - "yapf", + "yapf", + "mypy", ] test = [ "pytest", - "pytest-cov" ] +[project.scripts] + + [tool.pytest.ini_options] addopts = "--import-mode=importlib" console_output_style = "count" -pythonpath = [".", "src"] +pythonpath = ["src"] python_classes = "Test" python_files = "test*.py" python_functions = "test*" @@ -64,6 +56,7 @@ testpaths = ["tests"] norecursedirs = "bin" filterwarnings = [] + [tool.coverage.run] branch = true -source = ["src/geos/geomechanics"] \ No newline at end of file +source = ["src/geos"] \ No newline at end of file diff --git a/geos-posp/pyproject.toml b/geos-posp/pyproject.toml index e2c01ec5..d9e9d26a 100644 --- a/geos-posp/pyproject.toml +++ b/geos-posp/pyproject.toml @@ -33,9 +33,15 @@ keywords = [ requires-python = ">= 3.10" dependencies = [ + "vtk >= 9.3", + "numpy >= 1.26", + "pandas >= 2.2", + "typing_extensions >= 4.12", + "geos-utils @ file:./geos-utils", "geos-geomechanics @ file:./geos-geomechanics", ] + [project.urls] Homepage = "https://github.com/GEOS-DEV/geosPythonPackages" Documentation = "https://geosx-geosx.readthedocs-hosted.com/projects/geosx-geospythonpackages/en/latest/" @@ -43,9 +49,6 @@ Repository = "https://github.com/GEOS-DEV/geosPythonPackages.git" "Bug Tracker" = "https://github.com/GEOS-DEV/geosPythonPackages/issues" [project.optional-dependencies] -build = [ - "build ~= 1.2" -] dev = [ "mypy", "yapf", diff --git a/geos-pv/pyproject.toml b/geos-pv/pyproject.toml index ff7b5789..22d9675e 100644 --- a/geos-pv/pyproject.toml +++ b/geos-pv/pyproject.toml @@ -2,58 +2,27 @@ requires = ["setuptools>=61.2"] build-backend = "setuptools.build_meta" -[tool.setuptools] -include-package-data = 
true - -[tool.setuptools.packages.find] -where = ["src"] -include = ["geos.pv*"] -exclude = ['tests*'] - [project] name = "geos-pv" version = "0.1.0" description = "geos-pv is a Python package that gathers Paraview plugins and dedicated utils to process and visualize GEOS inputs and outputs." authors = [{name = "GEOS Contributors" }] -maintainers = [{name = "Martin Lemay", email = "martin.lemay@external.totalenergies.com"}] +maintainers = [ + {name = "Martin Lemay", email = "martin.lemay@external.totalenergies.com"} +] license = {text = "Apache-2.0"} classifiers = [ - "Development Status :: 4 - Beta", - "Programming Language :: Python", - "License :: OSI Approved :: Apache Software License ", - "Natural Language :: English", - "Operating System :: OS Independent", - "Programming Language :: Python :: 3 :: Only", + "Development Status :: 4 - Beta", + "Programming Language :: Python" ] -requires-python = ">= 3.10" - -dependencies=[ - "geos-utils @ file:./geos-utils", - "vtk >= 9.3", - "pandas >= 2.2", -] +requires-python = ">=3.10" -[project.urls] -Homepage = "https://github.com/GEOS-DEV/geosPythonPackages" -Documentation = "https://geosx-geosx.readthedocs-hosted.com/projects/geosx-geospythonpackages/en/latest/" -Repository = "https://github.com/GEOS-DEV/geosPythonPackages.git" -"Bug Tracker" = "https://github.com/GEOS-DEV/geosPythonPackages/issues" +dynamic = ["dependencies"] [project.optional-dependencies] # dependency to use if install together with paraview paraview = ["paraview"] -build = [ - "build ~= 1.2" -] -dev = [ - "mypy", - "yapf", -] -test = [ - "pytest", - "pytest-cov" -] [tool.pytest.ini_options] addopts = "--import-mode=importlib" @@ -64,8 +33,4 @@ python_files = "test*.py" python_functions = "test*" testpaths = ["tests"] norecursedirs = "bin" -filterwarnings = [] - -[tool.coverage.run] -branch = true -source = ["src"] \ No newline at end of file +filterwarnings = [] \ No newline at end of file diff --git a/geos-utils/pyproject.toml 
b/geos-utils/pyproject.toml index 49a28382..b655fd74 100644 --- a/geos-utils/pyproject.toml +++ b/geos-utils/pyproject.toml @@ -2,43 +2,27 @@ requires = ["setuptools>=61.2"] build-backend = "setuptools.build_meta" -[tool.setuptools] -include-package-data = true - -[tool.setuptools.packages.find] -where = ["src"] -include = ["geos.utils*"] -exclude = ['tests*'] - [project] name = "geos-utils" version = "0.1.0" description = "geos-utils is a Python package that gathers utilities common to all GEOS python packages." authors = [{name = "GEOS Contributors" }] -maintainers = [{name = "Martin Lemay", email = "martin.lemay@external.totalenergies.com"}] +maintainers = [ + {name = "Martin Lemay", email = "martin.lemay@external.totalenergies.com"} +] license = {text = "Apache-2.0"} classifiers = [ - "Development Status :: 4 - Beta", - "Programming Language :: Python", - "License :: OSI Approved :: Apache Software License ", - "Natural Language :: English", - "Operating System :: OS Independent", - "Programming Language :: Python :: 3 :: Only", + "Development Status :: 4 - Beta", + "Programming Language :: Python" ] -requires-python = ">= 3.10" +requires-python = ">=3.9" dependencies = [ - "typing_extensions >= 4.12", - "numpy >= 1.26", + "typing_extensions", + "numpy", ] -[project.urls] -Homepage = "https://github.com/GEOS-DEV/geosPythonPackages" -Documentation = "https://geosx-geosx.readthedocs-hosted.com/projects/geosx-geospythonpackages/en/latest/" -Repository = "https://github.com/GEOS-DEV/geosPythonPackages.git" -"Bug Tracker" = "https://github.com/GEOS-DEV/geosPythonPackages/issues" - [project.optional-dependencies] build = [ "build ~= 1.2" @@ -52,6 +36,10 @@ test = [ "pytest-cov" ] + +[project.scripts] + + [tool.pytest.ini_options] addopts = "--import-mode=importlib" console_output_style = "count" @@ -65,4 +53,5 @@ filterwarnings = [] [tool.coverage.run] branch = true -source = ["src/geos/utils"] \ No newline at end of file +source = ["src/geos/utils"] + From 
33e78e08be2c38df49849047cf7323f8bf306f25 Mon Sep 17 00:00:00 2001 From: mlemayTTE Date: Tue, 15 Apr 2025 10:52:17 +0200 Subject: [PATCH 19/20] Harmonize dependency versions --- docs/requirements.txt | 30 +++++++++++++------------- geos-geomechanics/pyproject.toml | 8 +++---- geos-mesh/pyproject.toml | 37 ++++++++++++++++++++++++-------- geos-posp/pyproject.toml | 14 +++++++----- geos-prep/pyproject.toml | 2 +- geos-pv/pyproject.toml | 6 ++---- geos-utils/pyproject.toml | 22 ++++++++++++------- 7 files changed, 73 insertions(+), 46 deletions(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index 773420cf..f911d8f8 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,19 +1,19 @@ -sphinx >= 7.4.7 -sphinx_rtd_theme -sphinx-argparse >= 0.5.2 -sphinx-autodoc-typehints -sphinx-design +sphinx >= 8.2 +sphinx_rtd_theme >= 3.0 +sphinx-argparse >= 0.5 +sphinx-autodoc-typehints >= 3.1 +sphinx-design >= 0.6 # Running CLI programs and capture outputs -sphinxcontrib-programoutput>=0.17 +sphinxcontrib-programoutput >= 0.17 # Installing all package requirements to be able to load all the modules and run the help. 
-vtk >= 9.1 +vtk >= 9.3 networkx >= 2.4 -tqdm -numpy -pandas -typing_extensions +tqdm >= 4.67 +numpy >= 2.2 +pandas >= 2.2 +typing_extensions > 4.12 matplotlib>=3.9.4 -h5py -lxml>=4.5.0 -parameterized -pyvista +h5py >= 3.12 +lxml >= 4.5.0 +parameterized >= 0.9 +pyvista >= 0.44 diff --git a/geos-geomechanics/pyproject.toml b/geos-geomechanics/pyproject.toml index d49048c3..16439ad6 100644 --- a/geos-geomechanics/pyproject.toml +++ b/geos-geomechanics/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = ["setuptools>=61.2"] +requires = ["setuptools>=61.2", "wheel >= 0.37.1"] build-backend = "setuptools.build_meta" [tool.setuptools] @@ -7,7 +7,7 @@ include-package-data = true [tool.setuptools.packages.find] where = ["src"] -include = ["geos_geomechanics*"] +include = ["geos.geomechanics*"] exclude = ['tests*'] [project] @@ -22,11 +22,11 @@ classifiers = [ "Programming Language :: Python" ] dependencies=[ + "geos-utils @ file:./geos-utils", "vtk >= 9.3", - "numpy >= 1.26", + "numpy >= 2.2", "pandas >= 2.2", "typing_extensions >= 4.12", - "geos-utils @ file:./geos-utils", ] requires-python = ">= 3.10" diff --git a/geos-mesh/pyproject.toml b/geos-mesh/pyproject.toml index 03708863..2317c68b 100644 --- a/geos-mesh/pyproject.toml +++ b/geos-mesh/pyproject.toml @@ -1,29 +1,35 @@ [build-system] -requires = ["setuptools>=42", "wheel"] +requires = ["setuptools>=61.2", "wheel >= 0.37.1"] build-backend = "setuptools.build_meta" +[tool.setuptools] +include-package-data = true + +[tool.setuptools.packages.find] +where = ["src"] +include = ["geos.mesh*"] +exclude = ['tests*'] + [project] name = "geos-mesh" version = "0.0.1" description = "GEOS mesh tools" authors = [{name = "GEOS Contributors" }] -maintainers = [ - {name = "Christopher Sherman", email = "sherman27@llnl.gov"} -] +maintainers = [{name = "Christopher Sherman", email = "sherman27@llnl.gov"}] license = {text = "LGPL-2.1"} classifiers = [ "Development Status :: 4 - Beta", "Programming Language :: Python" ] 
-requires-python = ">=3.8" +requires-python = ">=3.10" dependencies = [ - "vtk >= 9.1", + "vtk >= 9.3", "networkx >= 2.4", - "tqdm", - "numpy", - "meshio>=5.3.2", + "tqdm >= 4.67", + "numpy >= 2.2", + "meshio >= 5.3", ] [project.scripts] @@ -36,6 +42,19 @@ Documentation = "https://geosx-geosx.readthedocs-hosted.com/projects/geosx-geosp Repository = "https://github.com/GEOS-DEV/geosPythonPackages.git" "Bug Tracker" = "https://github.com/GEOS-DEV/geosPythonPackages/issues" +[project.optional-dependencies] +build = [ + "build ~= 1.2" +] +dev = [ + "mypy", + "yapf", +] +test = [ + "pytest-cov", + "pytest" +] + [tool.pytest.ini_options] addopts = [ "--import-mode=importlib", diff --git a/geos-posp/pyproject.toml b/geos-posp/pyproject.toml index d9e9d26a..b51c1ca7 100644 --- a/geos-posp/pyproject.toml +++ b/geos-posp/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = ["setuptools>=61.2"] +requires = ["setuptools>=61.2", "wheel >= 0.37.1"] build-backend = "setuptools.build_meta" [tool.setuptools] @@ -14,7 +14,8 @@ exclude = ['tests*'] name = "geos-posp" version = "1.0.0" description = "The Python package geos-posp is dedicated to post-process data from the geos simulation tool." 
-authors = [{name = "Martin Lemay", email = "martin.lemay@external.totalenergies.com"}] +authors = [{name = "GEOS Contributors" }] +maintainers = [{name = "Martin Lemay", email = "martin.lemay@external.totalenergies.com"}] license = {text = "Apache-2.0"} classifiers = [ "Intended Audience :: Developers", @@ -33,12 +34,12 @@ keywords = [ requires-python = ">= 3.10" dependencies = [ + "geos-geomechanics @ file:./geos-geomechanics", + "geos-utils @ file:./geos-utils", "vtk >= 9.3", - "numpy >= 1.26", + "numpy >= 2.2", "pandas >= 2.2", "typing_extensions >= 4.12", - "geos-utils @ file:./geos-utils", - "geos-geomechanics @ file:./geos-geomechanics", ] @@ -49,6 +50,9 @@ Repository = "https://github.com/GEOS-DEV/geosPythonPackages.git" "Bug Tracker" = "https://github.com/GEOS-DEV/geosPythonPackages/issues" [project.optional-dependencies] +build = [ + "build ~= 1.2" +] dev = [ "mypy", "yapf", diff --git a/geos-prep/pyproject.toml b/geos-prep/pyproject.toml index fb0100c4..1f0789ae 100644 --- a/geos-prep/pyproject.toml +++ b/geos-prep/pyproject.toml @@ -31,7 +31,7 @@ keywords = [ ] dependencies = [ "vtk >= 9.3", - "numpy >= 1.26", + "numpy >= 2.2", "pandas >= 2.2", "typing_extensions >= 4.12", "geos_posp >=1.0", diff --git a/geos-pv/pyproject.toml b/geos-pv/pyproject.toml index 22d9675e..0444641c 100644 --- a/geos-pv/pyproject.toml +++ b/geos-pv/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = ["setuptools>=61.2"] +requires = ["setuptools>=61.2", "wheel >= 0.37.1"] build-backend = "setuptools.build_meta" [project] @@ -7,9 +7,7 @@ name = "geos-pv" version = "0.1.0" description = "geos-pv is a Python package that gathers Paraview plugins and dedicated utils to process and visualize GEOS inputs and outputs." 
authors = [{name = "GEOS Contributors" }] -maintainers = [ - {name = "Martin Lemay", email = "martin.lemay@external.totalenergies.com"} -] +maintainers = [{name = "Martin Lemay", email = "martin.lemay@external.totalenergies.com"}] license = {text = "Apache-2.0"} classifiers = [ "Development Status :: 4 - Beta", diff --git a/geos-utils/pyproject.toml b/geos-utils/pyproject.toml index b655fd74..4a3cc054 100644 --- a/geos-utils/pyproject.toml +++ b/geos-utils/pyproject.toml @@ -1,26 +1,32 @@ [build-system] -requires = ["setuptools>=61.2"] +requires = ["setuptools>=61.2", "wheel >= 0.37.1"] build-backend = "setuptools.build_meta" +[tool.setuptools] +include-package-data = true + +[tool.setuptools.packages.find] +where = ["src"] +include = ["geos.utils*"] +exclude = ['tests*'] + [project] name = "geos-utils" version = "0.1.0" description = "geos-utils is a Python package that gathers utilities common to all GEOS python packages." authors = [{name = "GEOS Contributors" }] -maintainers = [ - {name = "Martin Lemay", email = "martin.lemay@external.totalenergies.com"} -] +maintainers = [{name = "Martin Lemay", email = "martin.lemay@external.totalenergies.com"}] license = {text = "Apache-2.0"} classifiers = [ "Development Status :: 4 - Beta", "Programming Language :: Python" ] -requires-python = ">=3.9" +requires-python = ">=3.10" dependencies = [ - "typing_extensions", - "numpy", + "numpy >= 2.2", + "typing_extensions >= 4.12", ] [project.optional-dependencies] @@ -28,8 +34,8 @@ build = [ "build ~= 1.2" ] dev = [ - "yapf", "mypy", + "yapf", ] test = [ "pytest", From b8f499f4c25ef98f0f3376f56f266964fd1f12c7 Mon Sep 17 00:00:00 2001 From: mlemayTTE Date: Tue, 15 Apr 2025 10:52:56 +0200 Subject: [PATCH 20/20] Update installation instructions --- README.md | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 53c87ddf..991e45cd 100644 --- a/README.md +++ b/README.md @@ -128,7 +128,11 @@ Installation python -m pytest ./ ``` 
-**NOTE: geos-pv package cannot be build alone, but together with Paraview ([see Paraview compilation guide](https://gitlab.kitware.com/paraview/paraview/-/blob/master/Documentation/dev/build.md)). It is recommended to use Paraview v5.12+, which is based on python 3.10+. Alternatively, plugins from geos-pv/PVplugins can be manually loaded into Paraview ([see documentation](https://docs.paraview.org/en/latest/ReferenceManual/pythonProgrammableFilter.html#python-algorithm)).** + [!WARNING] + Due to local package conflicts with `pip install`, it is recommended either to build the packages one by one, or to include only top-level packages (see dependency tree above) in the build list. + + [!NOTE] + geos-pv package cannot be built alone, but together with Paraview ([see Paraview compilation guide](https://gitlab.kitware.com/paraview/paraview/-/blob/master/Documentation/dev/build.md)). It is recommended to use Paraview v5.12+, which is based on python 3.10+. Alternatively, plugins from geos-pv/PVplugins can be manually loaded into Paraview ([see documentation](https://docs.paraview.org/en/latest/ReferenceManual/pythonProgrammableFilter.html#python-algorithm)). Contributions ============= If you would like to report a bug, please submit an [issue](https://github.com/G If you would like to contribute to GEOS Python packages, please respect the following guidelines: 1. Create a new branch named from this template: `[CONTRIBUTOR]/[TYPE]/[TITLE]` where CONTRIBUTOR is the name of the contributor, TYPE is the type of contribution among 'feature', 'refactor', 'doc', 'ci', TITLE is a short title for the branch. -1. Add your code trying to integrate into the current code architecture. -1. Push the branch, open a new PR respecting naming [semantics](https://gist.github.com/joshbuchea/6f47e86d2510bce28f8e7f42ae84c716), and add reviewers +2. Add your code trying to integrate into the current code architecture. +3.
Push the branch, open a new PR respecting naming [semantics](https://gist.github.com/joshbuchea/6f47e86d2510bce28f8e7f42ae84c716), and add reviewers If you do not have the rights to push the code and open new PRs, consider opening a new issue to explain what you want to do and ask for the dev rights. @@ -170,7 +174,8 @@ dependencies = [ ] ``` -geos-pv dependencies are managed using a requirements.txt file where all external and internal dependencies are present. It ensures that internal dependency paths are correctly set when plugins are manually loaded into Paraview. +[!IMPORTANT] +geos-pv dependencies are managed using a requirements.txt (together with the setup.py) file where all internal (and external if needed) dependencies are present. It ensures that internal dependency paths are correctly set when plugins are manually loaded into Paraview. Release -------