2 changes: 1 addition & 1 deletion .github/workflows/typing-check.yml
@@ -16,7 +16,7 @@ jobs:
max-parallel: 3
matrix:
# add packages to check typing
package-name: ["hdf5-geomechanics", "geos-posp", "geos-timehistory", "geos-utils", "geos-xml-tools", "hdf5-wrapper"]
package-name: ["geos-geomechanics", "geos-posp", "geos-timehistory", "geos-utils", "geos-xml-tools", "hdf5-wrapper"]

steps:
- uses: actions/checkout@v3
2 changes: 1 addition & 1 deletion geos-timehistory/src/geos/timehistory/__init__.py
@@ -1 +1 @@
from .plot_time_history import getHistorySeries
from .plot_time_history import getHistorySeries #noqa: F401
70 changes: 40 additions & 30 deletions geos-timehistory/src/geos/timehistory/plot_time_history.py
@@ -1,40 +1,41 @@
import numpy as np
from typing import Any, Optional
from geos.hdf5_wrapper import wrapper as h5w
import matplotlib as mpl
import matplotlib.pyplot as plt
import os
import sys
import argparse

import re


def isiterable( obj ):
def isiterable( obj: Any ) -> bool:
"""Check if input is iterable."""
try:
it = iter( obj )
it = iter( obj ) # noqa: F841
except TypeError:
return False
return True


def getHistorySeries( database, variable, setname, indices=None, components=None ):
"""
Retrieve a series of time history structures suitable for plotting in addition to the specific set index and component for the time series
def getHistorySeries( database: h5w,
variable: str,
setname: str,
indices: Optional[ int | list[ int ] ] = None,
components: Optional[ int | list[ int ] ] = None ) -> Optional[ list[ tuple[ Any, ...] ] ]:
"""Retrieve a series of time history structures suitable for plotting in addition to the specific set index and component for the time series.

Args:
database (geos.hdf5_wrapper.hdf5_wrapper): database to retrieve time history data from
variable (str): the name of the time history variable for which to retrieve time-series data
setname (str): the name of the index set as specified in the geosx input xml for which to query time-series data
indices (int, list): the indices in the named set to query for, if None, defaults to all
components (int, list): the components in the flattened data types to retrieve, defaults to all
indices (Optional[int | list[ int ]]): the indices in the named set to query for, if None, defaults to all
components (Optional[int | list[ int ]]): the components in the flattened data types to retrieve, defaults to all

Returns:
list: list of (time, data, idx, comp) timeseries tuples for each time history data component
Optional[list[ tuple[ Any, ...] ]]: list of (time, data, idx, comp) timeseries tuples for each time history data component
"""

set_regex = re.compile( variable + '(.*?)', re.IGNORECASE )
if setname is not None:
set_regex = re.compile( variable + '\s*' + str( setname ), re.IGNORECASE )
set_regex = re.compile( variable + r'\s*' + str( setname ), re.IGNORECASE )
time_regex = re.compile( 'Time', re.IGNORECASE ) # need to make this per-set, thought that was in already?

set_match = list( filter( set_regex.match, database.keys() ) )
@@ -62,39 +63,48 @@ def getHistorySeries( database, variable, setname, indices=None, components=None
print(
f"Error: The length of the time-series {time_match} and data-series {set_match} do not match: {time_series.shape} and {data_series.shape} !"
)

indices1: list[ int ] = []
if indices is not None:
if type( indices ) is int:
indices = [ indices ]
if isiterable( indices ):
oob_idxs = list( filter( lambda idx: not 0 <= idx < data_series.shape[ 1 ], indices ) )
indices1 = [ indices ]
elif isiterable( indices ):
oob_idxs: list[ int ] = list(
filter(
lambda idx: not 0 <= idx < data_series.shape[ 1 ], # type: ignore[arg-type]
indices ) ) # type: ignore[arg-type]
if len( oob_idxs ) > 0:
print( f"Error: The specified indices: ({', '.join(oob_idxs)}) " + "\n\t" +
f" are out of the dataset index range: [0,{data_series.shape[1]})" )
indices = list( set( indices ) - set( oob_idxs ) )
print( f"Error: The specified indices: ({', '.join(map(str, oob_idxs))}) " + "\n\t" +
f" are out of the dataset index range: [0,{data_series.shape[1]})" ) # type: ignore[arg-type]
indices1 = list( set( indices ) - set( oob_idxs ) ) # type: ignore[arg-type]
else:
print( f"Error: unsupported indices type: {type(indices)}" )
else:
indices = range( data_series.shape[ 1 ] )
indices1 = list( range( data_series.shape[ 1 ] ) )

components1: list[ int ] = []
if components is not None:
if type( components ) is int:
components = [ components ]
if isiterable( components ):
oob_comps = list( filter( lambda comp: not 0 <= comp < data_series.shape[ 2 ], components ) )
components1 = [ components ]
elif isiterable( components ):
oob_comps: list[ int ] = list(
filter(
lambda comp: not 0 <= comp < data_series.shape[ 2 ], # type: ignore[arg-type]
components ) ) # type: ignore[arg-type]
if len( oob_comps ) > 0:
print( f"Error: The specified components: ({', '.join(oob_comps)}) " + "\n\t" +
print( f"Error: The specified components: ({', '.join(map(str, oob_comps))}) " + "\n\t" +
" is out of the dataset component range: [0,{data_series.shape[1]})" )
components = list( set( components ) - set( oob_comps ) )
components1 = list( set( components ) - set( oob_comps ) ) # type: ignore[arg-type]
else:
print( f"Error: unsupported components type: {type(components)}" )
else:
components = range( data_series.shape[ 2 ] )
components1 = list( range( data_series.shape[ 2 ] ) )

return [ ( time_series[ :, 0 ], data_series[ :, idx, comp ], idx, comp ) for idx in indices for comp in components ]
return [ ( time_series[ :, 0 ], data_series[ :, idx, comp ], idx, comp ) for idx in indices1
for comp in components1 ]


def commandLinePlotGen():
def commandLinePlotGen() -> int:
"""Parse commande line."""
parser = argparse.ArgumentParser(
description="A script that parses geosx HDF5 time-history files and produces time-history plots using matplotlib"
)
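For context on the retyped `getHistorySeries` above, a minimal usage sketch follows. The HDF5 file name, variable name, and set name are hypothetical, and constructing the wrapper directly from a file path is an assumption; only the call signature and the (time, data, idx, comp) tuple layout come from the diff.

```python
# Hedged sketch: "results.hdf5", "pressure", and "source" are hypothetical,
# and building the wrapper from a path is assumed; the getHistorySeries
# signature and the returned (time, data, idx, comp) tuples follow the diff above.
import matplotlib.pyplot as plt
from geos.hdf5_wrapper import wrapper as h5w
from geos.timehistory import getHistorySeries

database = h5w( "results.hdf5" )  # assumed constructor taking a file path
series = getHistorySeries( database, "pressure", "source", indices=[ 0, 1 ], components=0 )
if series is not None:
    for time, data, idx, comp in series:
        plt.plot( time, data, label=f"idx={idx}, comp={comp}" )
    plt.legend()
    plt.show()
```

The `indices1`/`components1` lists introduced in the diff leave the original parameters untouched, which is what lets an integer argument such as `components=0` be normalized to a list internally while keeping the annotated parameter types honest.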
1 change: 1 addition & 0 deletions geos-trame/pyproject.toml
@@ -46,6 +46,7 @@ dependencies = [
"colorcet==3.1.0",
"funcy==2.0",
"typing_inspect==0.9.0",
"typing_extensions>=4.12",
]

[project.optional-dependencies]
14 changes: 13 additions & 1 deletion geos-xml-tools/pyproject.toml
@@ -16,7 +16,8 @@ requires-python = ">=3.8"
dependencies = [
"lxml>=4.5.0",
"parameterized",
"numpy"
"numpy>=1.16.2",
"typing_extensions>=4.12"
]

[project.scripts]
@@ -25,3 +26,14 @@ dependencies = [
test_geosx_xml_tools = "geos.xml_tools.tests.test_manager:main"
check_xml_attribute_coverage = "geos.xml_tools.attribute_coverage:main"
check_xml_redundancy = "geos.xml_tools.xml_redundancy_check:main"

[tool.pytest.ini_options]
addopts = "--import-mode=importlib"
console_output_style = "count"
pythonpath = [".", "src"]
python_classes = "Test"
python_files = "test*.py"
python_functions = "test*"
testpaths = ["tests"]
norecursedirs = "bin"
filterwarnings = []
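The `[tool.pytest.ini_options]` block added above makes pytest collect from `tests/` using importlib import mode, matching `test*.py` files, classes prefixed with `Test`, and functions prefixed with `test`. A hypothetical test module matching those patterns might look like the sketch below; the file name is illustrative, while the positional `input` argument and the `--indent` default of 2 are taken from `build_xml_formatter_input_parser` in the command_line_parsers.py diff further down.

```python
# geos-xml-tools/tests/test_parsers.py -- hypothetical file; the location and
# naming follow the testpaths/python_files/python_functions settings above.
from geos.xml_tools import command_line_parsers


def test_formatter_parser_default_indent():
    # The formatter parser declares a positional 'input' argument and an
    # indent default of 2 (see the command_line_parsers.py diff below).
    parser = command_line_parsers.build_xml_formatter_input_parser()
    args = parser.parse_args( [ "input.xml" ] )
    assert args.indent == 2
```

Running `pytest` from `geos-xml-tools/` would then collect this via the `tests` testpath, with `pythonpath = [".", "src"]` placing the `src` layout on `sys.path` so no editable install is required.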
4 changes: 1 addition & 3 deletions geos-xml-tools/src/geos/xml_tools/__init__.py
@@ -1,3 +1 @@
"""
A python module that enables advanced xml features for GEOSX.
"""
"""A python module that enables advanced xml features for GEOSX."""
25 changes: 11 additions & 14 deletions geos-xml-tools/src/geos/xml_tools/attribute_coverage.py
@@ -12,7 +12,7 @@ def parse_schema_element( root: ElementTree.Element,
xsd: str = '{http://www.w3.org/2001/XMLSchema}',
recursive_types: Iterable[ str ] = [ 'PeriodicEvent', 'SoloEvent', 'HaltEvent' ],
folders: Iterable[ str ] = [ 'src', 'examples' ] ) -> record_type:
"""Parse the xml schema at the current level
"""Parse the xml schema at the current level.

Args:
root (lxml.etree.Element): the root schema node
@@ -24,7 +24,6 @@ Returns:
Returns:
dict: Dictionary of attributes and children for the current node
"""

element_type = node.get( 'type' )
element_name = node.get( 'name' )
element_def = root.find( "%scomplexType[@name='%s']" % ( xsd, element_type ) )
@@ -49,7 +48,7 @@


def parse_schema( fname: str ) -> record_type:
"""Parse the schema file into the xml attribute usage dict
"""Parse the schema file into the xml attribute usage dict.

Args:
fname (str): schema name
@@ -64,14 +63,14 @@


def collect_xml_attributes_level( local_types: record_type, node: ElementTree.Element, folder: str ) -> None:
"""Collect xml attribute usage at the current level
"""Collect xml attribute usage at the current level.

Args:
local_types (dict): dictionary containing attribute usage
node (lxml.etree.Element): current xml node
folder (str): the source folder for the current file
"""
for ka in node.attrib.keys():
for ka in node.attrib:
local_types[ 'attributes' ][ ka ][ folder ].append( node.get( ka ) )

for child in node:
@@ -80,7 +79,7 @@ def collect_xml_attributes_level( local_types: record_type, node: ElementTree.El


def collect_xml_attributes( xml_types: record_type, fname: str, folder: str ) -> None:
"""Collect xml attribute usage in a file
"""Collect xml attribute usage in a file.

Args:
xml_types (dict): dictionary containing attribute usage
@@ -97,15 +96,15 @@ def collect_xml_attributes( xml_types: record_type, fname: str, folder: str ) ->
def write_attribute_usage_xml_level( local_types: record_type,
node: ElementTree.Element,
folders: Iterable[ str ] = [ 'src', 'examples' ] ) -> None:
"""Write xml attribute usage file at a given level
"""Write xml attribute usage file at a given level.

Args:
local_types (dict): dict containing attribute usage at the current level
node (lxml.etree.Element): current xml node
folders (Iterable[ str ]): folders. Defaults to [ 'src', 'examples' ].
"""

# Write attributes
for ka in local_types[ 'attributes' ].keys():
for ka in local_types[ 'attributes' ]:
attribute_node = ElementTree.Element( ka )
node.append( attribute_node )

@@ -129,7 +128,7 @@


def write_attribute_usage_xml( xml_types: record_type, fname: str ) -> None:
"""Write xml attribute usage file
"""Write xml attribute usage file.

Args:
xml_types (dict): dictionary containing attribute usage by xml type
@@ -143,13 +142,12 @@ def write_attribute_usage_xml( xml_types: record_type, fname: str ) -> None:


def process_xml_files( geosx_root: str, output_name: str ) -> None:
"""Test for xml attribute usage
"""Test for xml attribute usage.

Args:
geosx_root (str): GEOSX root directory
output_name (str): output file name
"""

# Parse the schema
geosx_root = os.path.expanduser( geosx_root )
schema = '%ssrc/coreComponents/schema/schema.xsd' % ( geosx_root )
@@ -168,13 +166,12 @@


def main() -> None:
"""Entry point for the xml attribute usage test script
"""Entry point for the xml attribute usage test script.

Args:
-r/--root (str): GEOSX root directory
-o/--output (str): output file name
"""

# Parse the user arguments
parser = command_line_parsers.build_attribute_coverage_input_parser()
args = parser.parse_args()
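A short sketch of how the attribute-coverage helpers above compose; the GEOSX checkout path is hypothetical, while `process_xml_files` and the default output name are taken from the diff.

```python
# Hedged sketch: "~/GEOSX/" is a hypothetical checkout path; process_xml_files
# expands the user path itself, parses the schema, walks the xml files, and
# writes the attribute-usage report, as main() does for the console script.
from geos.xml_tools import attribute_coverage

attribute_coverage.process_xml_files( "~/GEOSX/", "attribute_test.xml" )
```

The equivalent command-line form would be `check_xml_attribute_coverage -r ~/GEOSX/ -o attribute_test.xml`, via the entry point declared in geos-xml-tools/pyproject.toml above.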
13 changes: 5 additions & 8 deletions geos-xml-tools/src/geos/xml_tools/command_line_parsers.py
@@ -3,7 +3,7 @@


def build_preprocessor_input_parser() -> argparse.ArgumentParser:
"""Build the argument parser
"""Build the argument parser.

Returns:
argparse.ArgumentParser: The parser
@@ -29,7 +29,7 @@ def build_preprocessor_input_parser() -> argparse.ArgumentParser:


def parse_xml_preprocessor_arguments() -> Tuple[ argparse.Namespace, Iterable[ str ] ]:
"""Parse user arguments
"""Parse user arguments.

Args:
-i/--input (str): Input file name (multiple allowed)
Expand All @@ -46,12 +46,11 @@ def parse_xml_preprocessor_arguments() -> Tuple[ argparse.Namespace, Iterable[ s


def build_xml_formatter_input_parser() -> argparse.ArgumentParser:
"""Build the argument parser
"""Build the argument parser.

Returns:
argparse.ArgumentParser: the parser instance
"""

parser = argparse.ArgumentParser()
parser.add_argument( 'input', type=str, help='Input file name' )
parser.add_argument( '-i', '--indent', type=int, help='Indent size', default=2 )
@@ -64,25 +63,23 @@ def build_xml_formatter_input_parser() -> argparse.ArgumentParser:


def build_attribute_coverage_input_parser() -> argparse.ArgumentParser:
"""Build attribute coverage redundancy input parser
"""Build attribute coverage redundancy input parser.

Returns:
argparse.ArgumentParser: parser instance
"""

parser = argparse.ArgumentParser()
parser.add_argument( '-r', '--root', type=str, help='GEOSX root', default='' )
parser.add_argument( '-o', '--output', type=str, help='Output file name', default='attribute_test.xml' )
return parser


def build_xml_redundancy_input_parser() -> argparse.ArgumentParser:
"""Build xml redundancy input parser
"""Build xml redundancy input parser.

Returns:
argparse.ArgumentParser: parser instance
"""

parser = argparse.ArgumentParser()
parser.add_argument( '-r', '--root', type=str, help='GEOSX root', default='' )
return parser
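As a quick check of the parser builders above, a sketch exercising the defaults declared by `build_attribute_coverage_input_parser`; parsing an empty argument list to simulate a run with no flags is illustrative.

```python
# Hedged sketch: an empty argv list exercises the defaults declared in
# build_attribute_coverage_input_parser in the diff above.
from geos.xml_tools import command_line_parsers

parser = command_line_parsers.build_attribute_coverage_input_parser()
args = parser.parse_args( [] )
assert args.root == '' and args.output == 'attribute_test.xml'
```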