diff --git a/.github/workflows/build_workflow.yml b/.github/workflows/build_workflow.yml index 8824343f8..767f407de 100644 --- a/.github/workflows/build_workflow.yml +++ b/.github/workflows/build_workflow.yml @@ -27,7 +27,7 @@ jobs: shell: bash -l {0} strategy: matrix: - python-version: ["3.10", "3.11", "3.12", "3.13"] + python-version: ["3.10", "3.11", "3.12", "3.13", "3.14"] fail-fast: false steps: - id: skip_check diff --git a/.github/workflows/docs_workflow.yml b/.github/workflows/docs_workflow.yml index a7f5e9b92..385849fd3 100644 --- a/.github/workflows/docs_workflow.yml +++ b/.github/workflows/docs_workflow.yml @@ -10,7 +10,7 @@ on: types: [published] env: - PYTHON_VERSION: "3.13" + PYTHON_VERSION: "3.14" jobs: publish-docs: diff --git a/ci/recipe/meta.yaml b/ci/recipe/meta.yaml index d565adcde..cd147e614 100644 --- a/ci/recipe/meta.yaml +++ b/ci/recipe/meta.yaml @@ -23,7 +23,7 @@ requirements: - pip - setuptools >=60 run: - - python >={{ python_min }},<3.14 + - python >={{ python_min }} - cartopy >=0.18.0 - cartopy_offlinedata - cmocean diff --git a/dev-spec.txt b/dev-spec.txt index 588a0621f..a5f9ca359 100644 --- a/dev-spec.txt +++ b/dev-spec.txt @@ -2,7 +2,7 @@ # $ conda create --name --file # Base -python >=3.10,<3.14 +python >=3.10 cartopy >=0.18.0 cartopy_offlinedata cmocean diff --git a/mpas_analysis/__main__.py b/mpas_analysis/__main__.py index cab22ea39..155b5d824 100644 --- a/mpas_analysis/__main__.py +++ b/mpas_analysis/__main__.py @@ -33,7 +33,7 @@ import time import json from importlib.metadata import Distribution -from importlib.resources import contents +from importlib.resources import files from mache import discover_machine, MachineInfo @@ -1075,10 +1075,11 @@ def main(): except FileNotFoundError: possible_machines = [] - machine_configs = contents('mache.machines') - for config in machine_configs: - if config.endswith('.cfg'): - possible_machines.append(os.path.splitext(config)[0]) + machine_configs = files('mache.machines').iterdir() 
+ for config_file in machine_configs: + if config_file.name.endswith('.cfg'): + possible_machines.append( + os.path.splitext(config_file.name)[0]) possible_machines = '\n '.join(sorted(possible_machines)) raise ValueError( diff --git a/mpas_analysis/ocean/climatology_map_ohc_anomaly.py b/mpas_analysis/ocean/climatology_map_ohc_anomaly.py index e0a39b1c2..8806e52d2 100644 --- a/mpas_analysis/ocean/climatology_map_ohc_anomaly.py +++ b/mpas_analysis/ocean/climatology_map_ohc_anomaly.py @@ -182,6 +182,12 @@ class RemapMpasOHCClimatology(RemapMpasClimatologySubtask): min_depth, max_depth : float The minimum and maximum depths for integration + + cp : float + Specific heat of seawater [J/(kg*degC)] + + rho : float + Reference density of seawater [kg/m3] """ def __init__(self, mpas_climatology_task, ref_year_climatology_task, @@ -239,6 +245,8 @@ def __init__(self, mpas_climatology_task, ref_year_climatology_task, self.run_after(ref_year_climatology_task) self.min_depth = min_depth self.max_depth = max_depth + self.cp = None + self.rho = None def setup_and_check(self): """ @@ -255,6 +263,9 @@ def setup_and_check(self): self.ref_year_climatology_task.add_variables(self.variableList, self.seasons) + self.cp = self.namelist.getfloat('config_specific_heat_sea_water') + self.rho = self.namelist.getfloat('config_density0') + def customize_masked_climatology(self, climatology, season): """ Compute the ocean heat content (OHC) anomaly from the temperature @@ -298,10 +309,10 @@ def _compute_ohc(self, climatology): ds_mesh = xr.open_dataset(self.meshFilename) ds_mesh = ds_mesh.isel(Time=0) - # specific heat [J/(kg*degC)] - cp = self.namelist.getfloat('config_specific_heat_sea_water') - # [kg/m3] - rho = self.namelist.getfloat('config_density0') + cp = self.cp + assert cp is not None, "Specific heat 'cp' has not been set" + rho = self.rho + assert rho is not None, "Reference density 'rho' has not been set" units_scale_factor = 1e-9 diff --git 
a/mpas_analysis/ocean/time_series_ohc_anomaly.py b/mpas_analysis/ocean/time_series_ohc_anomaly.py index b3dbf21d0..a81d56d80 100644 --- a/mpas_analysis/ocean/time_series_ohc_anomaly.py +++ b/mpas_analysis/ocean/time_series_ohc_anomaly.py @@ -31,6 +31,17 @@ class TimeSeriesOHCAnomaly(AnalysisTask): """ Performs analysis of ocean heat content (OHC) from time-series output. + + Attributes + ---------- + cp : float + Specific heat of seawater [J/(kg*degC)] + + rho : float + Reference density of seawater [kg/m3] + + meshFilename : str + The path to the MPAS mesh file """ # Authors # ------- @@ -132,6 +143,21 @@ def __init__(self, config, mpasTimeSeriesTask, controlConfig=None): plotTask.run_after(anomalyTask) self.add_subtask(plotTask) + self.cp = None + self.rho = None + self.meshFilename = None + + def setup_and_check(self): + """ + Store the specific heat and reference density of seawater for use + in OHC calculations. + """ + super().setup_and_check() + + self.cp = self.namelist.getfloat('config_specific_heat_sea_water') + self.rho = self.namelist.getfloat('config_density0') + self.meshFilename = self.get_mesh_filename() + def _compute_ohc(self, ds): """ Compute the OHC time series. 
@@ -139,10 +165,15 @@ def _compute_ohc(self, ds): # for convenience, rename the variables to simpler, shorter names ds = ds.rename(self.variableDict) - # specific heat [J/(kg*degC)] - cp = self.namelist.getfloat('config_specific_heat_sea_water') - # [kg/m3] - rho = self.namelist.getfloat('config_density0') + # these need to be set at setup time, not at runtime because pickling + # means the namelists and streams objects they come from aren't + # available at runtime + cp = self.cp + assert cp is not None, "Specific heat 'cp' has not been set" + rho = self.rho + assert rho is not None, "Reference density 'rho' has not been set" + meshFile = self.meshFilename + assert meshFile is not None, "Mesh filename has not been set" unitsScalefactor = 1e-22 @@ -152,8 +183,6 @@ def _compute_ohc(self, ds): ds.ohc.attrs['units'] = '$10^{22}$ J' ds.ohc.attrs['description'] = 'Ocean heat content in each region' - meshFile = self.get_mesh_filename() - # Define/read in general variables with xr.open_dataset(meshFile) as dsMesh: # reference depth [m] diff --git a/mpas_analysis/shared/analysis_task.py b/mpas_analysis/shared/analysis_task.py index c72903993..5507b7624 100644 --- a/mpas_analysis/shared/analysis_task.py +++ b/mpas_analysis/shared/analysis_task.py @@ -203,18 +203,7 @@ def setup_and_check(self): self.plotsDirectory = build_config_full_path(self.config, 'output', 'plotsSubdirectory') - namelistFileName = build_config_full_path( - self.config, 'input', - '{}NamelistFileName'.format(self.componentName)) - self.namelist = NameList(namelistFileName) - - streamsFileName = build_config_full_path( - self.config, 'input', - '{}StreamsFileName'.format(self.componentName)) - self.runStreams = StreamsFile(streamsFileName, - streamsdir=self.runDirectory) - self.historyStreams = StreamsFile(streamsFileName, - streamsdir=self.historyDirectory) + self._load_namelists_and_streams() self.calendar = self.namelist.get('config_calendar_type') @@ -282,6 +271,19 @@ def add_subtask(self, subtask): 
if subtask not in self.subtasks: self.subtasks.append(subtask) + def start(self): + """ + Clear unpicklable attributes and then start the analysis task as a new + process. + """ + # Authors + # ------- + # Xylar Asay-Davis + # clear unpicklable attributes before running the task + self._clear_namelists_and_streams() + + super(AnalysisTask, self).start() + def run(self, writeLogFile=True): """ Sets up logging and then runs the analysis task. @@ -320,6 +322,9 @@ def run(self, writeLogFile=True): startTime = time.time() try: + # reload namelists and streams, since they cannot be pickled + # as part of multiprocessing + self._load_namelists_and_streams() self.run_task() self._runStatus.value = AnalysisTask.SUCCESS except (Exception, BaseException) as e: @@ -525,6 +530,71 @@ def get_mesh_filename(self): return meshFilename + def __getstate__(self): + """ + Customize pickling to exclude unpicklable and unnecessary attributes. + This method is called during multiprocessing when the task is + serialized to be sent to a child process. We exclude task dependencies + and process internals that don't need to be transferred, such as logger + objects, process internals, and weakref-bearing attributes. + + Returns + ------- + state : dict + The object state with unpicklable and unnecessary attributes + removed. + """ + state = self.__dict__.copy() + + # Clear out attributes that should not be pickled + state['namelist'] = None + state['runStreams'] = None + state['historyStreams'] = None + state['runAfterTasks'] = [] + state['subtasks'] = [] + # Drop process internals and logger that can't/shouldn't be pickled + for key in ['_popen', 'logger', '_stackTrace']: + state.pop(key, None) + + # Drop weakref-bearing Finalize, etc., by not pickling _popen at all + # _runStatus is a multiprocessing.Value; depending on your logic, + # you may also want to skip it and let child initialize its own. 
+ + return state + + def _load_namelists_and_streams(self): + """ + Load namelist and streams attributes. + """ + # Authors + # ------- + # Xylar Asay-Davis + + namelistFileName = build_config_full_path( + self.config, 'input', + '{}NamelistFileName'.format(self.componentName)) + self.namelist = NameList(namelistFileName) + + streamsFileName = build_config_full_path( + self.config, 'input', + '{}StreamsFileName'.format(self.componentName)) + self.runStreams = StreamsFile(streamsFileName, + streamsdir=self.runDirectory) + self.historyStreams = StreamsFile(streamsFileName, + streamsdir=self.historyDirectory) + + def _clear_namelists_and_streams(self): + """ + Clear namelist and streams attributes that cannot be pickled for + multiprocessing. + """ + # Authors + # ------- + # Xylar Asay-Davis + + self.namelist = None + self.runStreams = None + self.historyStreams = None # }}} diff --git a/pyproject.toml b/pyproject.toml index c48fe32e6..ffab97d4f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,13 +27,14 @@ description = """\ """ license = { file = "LICENSE" } readme = "README.md" -requires-python = ">=3.10,<3.14" +requires-python = ">=3.10" classifiers = [ # these are only for searching/browsing projects on PyPI "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Development Status :: 5 - Production/Stable", diff --git a/suite/run_dev_suite.bash b/suite/run_dev_suite.bash index f3db1df68..6b865c5a4 100755 --- a/suite/run_dev_suite.bash +++ b/suite/run_dev_suite.bash @@ -19,7 +19,8 @@ cd .. 
machine=$(python -c "from mache import discover_machine; print(discover_machine())") -py=3.13 +py=$(python -c 'import sys; print(f"{sys.version_info[0]}.{sys.version_info[1]}")') + ./suite/setup.py -p ${py} -r main_py${py} -b ${branch} --copy_docs --clean -e ${env_name} ./suite/setup.py -p ${py} -r wc_defaults -b ${branch} --no_polar_regions -e ${env_name} ./suite/setup.py -p ${py} -r moc_am -b ${branch} -e ${env_name} @@ -33,7 +34,7 @@ py=3.13 # submit the jobs cd ${machine}_test_suite -main_py=3.13 +main_py=${py} cd main_py${main_py} echo main_py${main_py} RES=$(sbatch job_script.bash) diff --git a/suite/run_e3sm_unified_suite.bash b/suite/run_e3sm_unified_suite.bash index b060ede13..32648adfb 100755 --- a/suite/run_e3sm_unified_suite.bash +++ b/suite/run_e3sm_unified_suite.bash @@ -6,7 +6,7 @@ set -e branch=test_e3sm_unified # test building the docs -py=3.13 +py=$(python -c 'import sys; print(f"{sys.version_info[0]}.{sys.version_info[1]}")') machine=${E3SMU_MACHINE} ./suite/setup.py -p ${py} -r main_py${py} -b ${branch} --clean