Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
47 commits
Select commit Hold shift + click to select a range
53e8796
initial bare exception fix
alex-rakowski Nov 9, 2023
fbbc232
correcting multiple exception handling
alex-rakowski Nov 9, 2023
2212461
Changing to Exception if unsure
alex-rakowski Nov 10, 2023
51f078f
changing to assertion error
alex-rakowski Nov 10, 2023
db59746
removing TODO message
alex-rakowski Nov 10, 2023
be609b3
Changing to an Exception
alex-rakowski Nov 10, 2023
08957ff
changing from ValueError to Exception
alex-rakowski Nov 10, 2023
e6b6ade
black
alex-rakowski Nov 10, 2023
857715a
ruff C408
alex-rakowski Nov 10, 2023
7892191
ruff --select C419
alex-rakowski Nov 10, 2023
4288ca2
C409 C405
alex-rakowski Nov 10, 2023
1a00dd1
trailing commas
alex-rakowski Nov 10, 2023
dfa7546
black
alex-rakowski Nov 10, 2023
79b2559
E721
alex-rakowski Nov 10, 2023
0b20634
F401 - unused imports
alex-rakowski Nov 10, 2023
d3ccf03
black
alex-rakowski Nov 10, 2023
9022bee
more E721
alex-rakowski Nov 10, 2023
83a727a
Merge branch 'bare_exceptions' into unnecessary-collection-calls
alex-rakowski Nov 10, 2023
15f387f
Merge pull request #16 from alex-rakowski/unnecessary-collection-calls
alex-rakowski Nov 10, 2023
dd1a74b
removing old comment
alex-rakowski Nov 10, 2023
9657c42
adding KeyError
alex-rakowski Nov 10, 2023
dbfa1b2
adding KeyError to correct place ...
alex-rakowski Nov 10, 2023
6b5dfec
adding hdf5plugin back in
alex-rakowski Nov 10, 2023
89ad26a
Changing to Exception to catch more errors
alex-rakowski Nov 10, 2023
ed34a99
changing cupy GPU count method
alex-rakowski Nov 10, 2023
5275729
black
alex-rakowski Nov 10, 2023
96c96ae
using importlib to populate requirements
alex-rakowski Nov 13, 2023
f304a52
Merge branch 'dev' into bare_exceptions
alex-rakowski Nov 13, 2023
03e9e61
changing find_spec to try except
alex-rakowski Nov 14, 2023
436d1e4
adding suspected KeyError to TODO
alex-rakowski Nov 14, 2023
5f5d895
F811, F523/F524, F841, F741 - Corrections
alex-rakowski Nov 16, 2023
98c21d2
updating to autoscrape optional depends
alex-rakowski Nov 17, 2023
0f9b096
fixing typo in docstring
alex-rakowski Nov 17, 2023
dc9c632
removing unused vars
alex-rakowski Nov 17, 2023
4936f9e
Merge branch 'dev' of github.com:py4dstem/py4DSTEM into crystal-forma…
alex-rakowski Nov 17, 2023
03986fa
adding generic version extra tests
alex-rakowski Nov 17, 2023
8e4f7c9
removing TODO
alex-rakowski Nov 17, 2023
0a77665
ModuleNotFoundErrors
alex-rakowski Nov 17, 2023
92c6942
removing TODOS
alex-rakowski Nov 17, 2023
c280c8e
changing logic for print(Running Extra Checks run)
alex-rakowski Nov 17, 2023
f1a9b3a
cleaning up old commented out code
alex-rakowski Nov 17, 2023
ffa70f9
remove unused imports
alex-rakowski Nov 17, 2023
0b10f48
fixing import test
alex-rakowski Nov 18, 2023
83453df
Merge remote-tracking branch 'upstream/dev' into bare_exceptions
sezelt Mar 8, 2024
19914f6
missing import
sezelt Mar 8, 2024
9397208
other missing import
sezelt Mar 8, 2024
9201154
Merge branch 'crystal-format-fixes' into bare_exceptions
sezelt Mar 8, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 23 additions & 0 deletions .flake8
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
[flake8]
extend-ignore =
E114,
E115,
E116,
E201,
E202,
E203,
E204,
E231,
E265,
E266,
E303,
E402,
E501,
exclude =
; __init__.py # totally ignore __init__.py files
setup.py # ignore setup.py file
docs/
#F401 ignore unused imports in __init__.py files
#F403 ignore unable to detect undefined names from import *
per-file-ignores =
__init__.py:F401,F403
2 changes: 0 additions & 2 deletions .github/linters/.flake8

This file was deleted.

40 changes: 0 additions & 40 deletions .github/workflows/build-flake.yml
Original file line number Diff line number Diff line change
@@ -1,40 +0,0 @@
# This workflow will install Python dependencies, run tests and lint with a single version of Python
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions

name: Check module can be imported

on:
push:
branches: [ "dev" ]
pull_request:
branches: [ "dev" ]

permissions:
contents: read

jobs:
build:

runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v4
- name: Set up Python 3.10
uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install flake8 pytest
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Lint with flake8
run: |
# stop the build if there are Python syntax errors or undefined names
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
# flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
- name: Test that the module imports
run: |
pip install .
python -c "import py4DSTEM; print(py4DSTEM.__version__)"
17 changes: 11 additions & 6 deletions .github/workflows/linter.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
name: Check for errors with flake8
name: Lint with super-linter@v5-slim

on:
push:
Expand All @@ -17,9 +17,14 @@ jobs:
fetch-depth: 0

- name: Lint Code Base
uses: github/super-linter@v5
uses: super-linter/super-linter/slim@v5 # updated to latest slim as quicker to download
env:
VALIDATE_ALL_CODEBASE: false
VALIDATE_PYTHON_FLAKE8: true
DEFAULT_BRANCH: "dev"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
VALIDATE_ALL_CODEBASE: false # only check changes
VALIDATE_PYTHON_FLAKE8: true # lint with flake8
DEFAULT_BRANCH: "dev" # set default branch to dev
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # for github things
# FILTER_REGEX_EXCLUDE: .*test/.* # exclude test dirs
FILTER_REGEX_EXCLUDE: .*__init__.py/.* # exclude __init__.py files
FILTER_REGEX_INCLUDE: .*py4DSTEM/.* # only look for py4DSTEM
LINTER_RULES_PATH: / # set toplevel dir as the path to look for rules
PYTHON_FLAKE8_CONFIG_FILE: .flake8 # set specific config file
2 changes: 1 addition & 1 deletion py4DSTEM/braggvectors/braggvectors.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# Defines the BraggVectors class

from py4DSTEM.data import Data
from emdfile import Custom, PointListArray, PointList, Metadata
from emdfile import Custom, PointListArray, Metadata
from py4DSTEM.braggvectors.braggvector_methods import BraggVectorMethods
from os.path import basename
import numpy as np
Expand Down
2 changes: 1 addition & 1 deletion py4DSTEM/braggvectors/diskdetection.py
Original file line number Diff line number Diff line change
Expand Up @@ -221,7 +221,7 @@ def find_Bragg_disks(
mode = "dp"
elif data.ndim == 3:
mode = "dp_stack"
except:
except Exception:
er = f"entry {data} for `data` could not be parsed"
raise Exception(er)

Expand Down
50 changes: 30 additions & 20 deletions py4DSTEM/braggvectors/diskdetection_aiml.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,18 +4,15 @@
"""

import os
import glob
import json
import shutil
import numpy as np
from pathlib import Path


from scipy.ndimage import gaussian_filter
from time import time
from numbers import Number

from emdfile import tqdmnd, PointList, PointListArray
from emdfile import tqdmnd, PointListArray
from py4DSTEM.braggvectors.braggvectors import BraggVectors
from py4DSTEM.data import QPoints
from py4DSTEM.process.utils import get_maxima_2D
Expand Down Expand Up @@ -105,12 +102,14 @@ def find_Bragg_disks_aiml_single_DP(
"""
try:
import crystal4D
except:
raise ImportError("Import Error: Please install crystal4D before proceeding")
except ModuleNotFoundError:
raise ModuleNotFoundError(
"Import Error: Please install crystal4D before proceeding"
)
try:
import tensorflow as tf
except:
raise ImportError(
except ModuleNotFoundError:
raise ModuleNotFoundError(
"Please install tensorflow before proceeding - please check "
+ "https://www.tensorflow.org/install"
+ "for more information"
Expand Down Expand Up @@ -258,8 +257,10 @@ def find_Bragg_disks_aiml_selected(

try:
import crystal4D
except:
raise ImportError("Import Error: Please install crystal4D before proceeding")
except ModuleNotFoundError:
raise ModuleNotFoundError(
"Import Error: Please install crystal4D before proceeding"
)

assert len(Rx) == len(Ry)
peaks = []
Expand Down Expand Up @@ -435,8 +436,10 @@ def find_Bragg_disks_aiml_serial(

try:
import crystal4D
except:
raise ImportError("Import Error: Please install crystal4D before proceeding")
except ModuleNotFoundError:
raise ModuleNotFoundError(
"Import Error: Please install crystal4D before proceeding"
)

# Make the peaks PointListArray
dtype = [("qx", float), ("qy", float), ("intensity", float)]
Expand Down Expand Up @@ -645,8 +648,10 @@ def find_Bragg_disks_aiml(
"""
try:
import crystal4D
except:
raise ImportError("Please install crystal4D before proceeding")
except ModuleNotFoundError:
raise ModuleNotFoundError(
"Import Error: Please install crystal4D before proceeding"
)

def _parse_distributed(distributed):
import os
Expand Down Expand Up @@ -842,7 +847,8 @@ def _integrate_disks(DP, maxima_x, maxima_y, maxima_int, int_window_radius=1):
disks.append(np.average(disk))
try:
disks = disks / max(disks)
except:
# possibly a ZeroDivisionError
except Exception:
pass
return (maxima_x, maxima_y, disks)

Expand Down Expand Up @@ -880,8 +886,8 @@ def _get_latest_model(model_path=None):

try:
import tensorflow as tf
except:
raise ImportError(
except ModuleNotFoundError:
raise ModuleNotFoundError(
"Please install tensorflow before proceeding - please check "
+ "https://www.tensorflow.org/install"
+ "for more information"
Expand All @@ -893,8 +899,11 @@ def _get_latest_model(model_path=None):
if model_path is None:
try:
os.mkdir("./tmp")
except:
except FileExistsError:
pass
except Exception as e:
pass
# raise e
# download the json file with the meta data
gdrive_download(
"FCU-Net",
Expand All @@ -912,7 +921,8 @@ def _get_latest_model(model_path=None):
with open("./tmp/model_metadata_old.json") as f_old:
metaold = json.load(f_old)
file_id_old = metaold["file_id"]
except:
# I think just FileNotFoundError
except (FileNotFoundError, Exception):
file_id_old = file_id

if os.path.exists(file_path) and file_id == file_id_old:
Expand All @@ -929,7 +939,7 @@ def _get_latest_model(model_path=None):
gdrive_download(file_id, destination="./tmp", filename=filename.name)
try:
shutil.unpack_archive(filename, "./tmp", format="zip")
except:
except Exception:
pass
model_path = file_path
os.rename("./tmp/model_metadata.json", "./tmp/model_metadata_old.json")
Expand Down
11 changes: 6 additions & 5 deletions py4DSTEM/braggvectors/diskdetection_aiml_cuda.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@
from emdfile import tqdmnd
from py4DSTEM.braggvectors.braggvectors import BraggVectors
from emdfile import PointList, PointListArray
from py4DSTEM.data import QPoints
from py4DSTEM.braggvectors.kernels import kernels
from py4DSTEM.braggvectors.diskdetection_aiml import _get_latest_model

Expand All @@ -23,8 +22,8 @@

try:
import tensorflow as tf
except:
raise ImportError(
except ModuleNotFoundError:
raise ModuleNotFoundError(
"Please install tensorflow before proceeding - please check "
+ "https://www.tensorflow.org/install"
+ "for more information"
Expand Down Expand Up @@ -637,7 +636,8 @@ def upsampled_correlation_cp(imageCorr, upsampleFactor, xyShift):
)
dx = (icc[2, 1] - icc[0, 1]) / (4 * icc[1, 1] - 2 * icc[2, 1] - 2 * icc[0, 1])
dy = (icc[1, 2] - icc[1, 0]) / (4 * icc[1, 1] - 2 * icc[1, 2] - 2 * icc[1, 0])
except:
# I think this is just the IndexError
except Exception:
dx, dy = (
0,
0,
Expand Down Expand Up @@ -733,6 +733,7 @@ def _integrate_disks_cp(DP, maxima_x, maxima_y, maxima_int, int_window_radius=1)
disks.append(np.average(disk))
try:
disks = disks / max(disks)
except:
# Possibly ZeroDivisionError
except Exception:
pass
return (maxima_x, maxima_y, disks)
3 changes: 2 additions & 1 deletion py4DSTEM/braggvectors/diskdetection_cuda.py
Original file line number Diff line number Diff line change
Expand Up @@ -621,7 +621,8 @@ def upsampled_correlation(imageCorr, upsampleFactor, xyShift):
dy = (icc[1, 2] - icc[1, 0]) / (
4 * icc[1, 1] - 2 * icc[1, 2] - 2 * icc[1, 0]
)
except:
# TODO Work out what exception to use IndexError
except Exception:
dx, dy = (
0,
0,
Expand Down
10 changes: 1 addition & 9 deletions py4DSTEM/braggvectors/diskdetection_parallel_new.py
Original file line number Diff line number Diff line change
@@ -1,24 +1,16 @@
import numpy as np
import matplotlib.pyplot as plt
import h5py
import time
import dill

import dask
import dask.array as da
import dask.config
from dask import delayed
from dask.distributed import Client, LocalCluster
from dask.diagnostics import ProgressBar

# import dask.bag as db

# import distributed
from distributed.protocol.serialize import register_serialization_family
import distributed

import py4DSTEM
from emdfile import PointListArray, PointList
from emdfile import PointListArray
from py4DSTEM.braggvectors.diskdetection import _find_Bragg_disks_single_DP_FK


Expand Down
4 changes: 2 additions & 2 deletions py4DSTEM/braggvectors/threshold.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ def threshold_Braggpeaks(
pattern
"""
assert all(
[item in pointlistarray.dtype.fields for item in ["qx", "qy", "intensity"]]
item in pointlistarray.dtype.fields for item in ["qx", "qy", "intensity"]
), "pointlistarray must include the coordinates 'qx', 'qy', and 'intensity'."
for Rx, Ry in tqdmnd(
pointlistarray.shape[0],
Expand Down Expand Up @@ -112,7 +112,7 @@ def universal_threshold(
assert isinstance(pointlistarray, PointListArray)
assert metric in ("maximum", "average", "median", "manual")
assert all(
[item in pointlistarray.dtype.fields for item in ["qx", "qy", "intensity"]]
item in pointlistarray.dtype.fields for item in ["qx", "qy", "intensity"]
), "pointlistarray must include the coordinates 'qx', 'qy', and 'intensity'."
_pointlistarray = pointlistarray.copy()
if name is None:
Expand Down
12 changes: 5 additions & 7 deletions py4DSTEM/data/calibration.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,7 @@
# Defines the Calibration class, which stores calibration metadata

import numpy as np
from numbers import Number
from typing import Optional
from warnings import warn

from emdfile import Metadata, Root
from py4DSTEM.data.propagating_calibration import call_calibrate
Expand Down Expand Up @@ -505,7 +503,7 @@ def get_origin(self, rx=None, ry=None):
qx0 = self._get_value("qx0", rx, ry)
qy0 = self._get_value("qy0", rx, ry)
ans = (qx0, qy0)
if any([x is None for x in ans]):
if any(x is None for x in ans):
ans = None
return ans

Expand All @@ -518,7 +516,7 @@ def get_origin_shift(self, rx=None, ry=None):
qx0 = self._get_value("qx0_shift", rx, ry)
qy0 = self._get_value("qy0_shift", rx, ry)
ans = (qx0, qy0)
if any([x is None for x in ans]):
if any(x is None for x in ans):
ans = None
return ans

Expand All @@ -540,7 +538,7 @@ def get_origin_meas(self, rx=None, ry=None):
qx0 = self._get_value("qx0_meas", rx, ry)
qy0 = self._get_value("qy0_meas", rx, ry)
ans = (qx0, qy0)
if any([x is None for x in ans]):
if any(x is None for x in ans):
ans = None
return ans

Expand Down Expand Up @@ -615,7 +613,7 @@ def get_ellipse(self, rx=None, ry=None):
b = self.get_b(rx, ry)
theta = self.get_theta(rx, ry)
ans = (a, b, theta)
if any([x is None for x in ans]):
if any(x is None for x in ans):
ans = None
return ans

Expand Down Expand Up @@ -778,7 +776,7 @@ def get_probe_param(self):
qx0 = self._get_value("qx0")
qy0 = self._get_value("qy0")
ans = (probe_semiangle, qx0, qy0)
if any([x is None for x in ans]):
if any(x is None for x in ans):
ans = None
return ans

Expand Down
Loading