Skip to content

Commit da3548c

Browse files
committed
adds failing codes to ignores list to be fixed later
1 parent dacf5bb commit da3548c

File tree

3 files changed

+64
-18
lines changed

3 files changed

+64
-18
lines changed

pyproject.toml

Lines changed: 48 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -78,8 +78,55 @@ ignore = [
7878
"D206", # Docstrings should be indented with spaces; unnecessary when running ruff-format
7979
"E501", # Line length too long; unnecessary when running ruff-format
8080
"W191", # Indentation contains tabs; unnecessary when running ruff-format
81-
]
8281

82+
# FIX AND REMOVE BELOW CODES:
83+
"ANN202", # Missing return type annotation for private function
84+
"ANN001", # Missing type annotation for function argument
85+
"D102", # Missing docstring in public method
86+
"SIM117", # Use a single `with` statement with multiple contexts instead of nested `with` statements
87+
"PT027", # Use `pytest.raises` instead of unittest-style `assertRaisesRegex`
88+
"PT009", # Use a regular `assert` instead of unittest-style `assertEqual` / `assertIsInstance`
89+
"PD011", # Use `.to_numpy()` instead of `.values`
90+
"D101", # Missing docstring in public class
91+
"D401", # First line of docstring should be in imperative mood
92+
"FIX002", # Line contains TODO, consider resolving the issue
93+
"RET505", # Unnecessary `else` after `return` statement
94+
"E721", # Use `is` and `is not` for type comparisons, or `isinstance()` for isinstance checks
95+
"UP031", # Use format specifiers instead of percent format
96+
"E731", # Do not assign a `lambda` expression, use a `def`
97+
"ARG005", # Unused lambda argument
98+
"ARG001", # Unused function argument
99+
"SIM102", # Use a single `if` statement instead of nested `if` statements
100+
"RET504", # Unnecessary assignment to `result` before `return` statement
101+
"N802", # Function name should be lowercase
102+
"RET506", # Unnecessary `else` after `raise` statement
103+
"B904", # Within an `except` clause, raise exceptions with `raise ... from err` or `raise ... from None`
104+
"ARG002", # Unused method argument
105+
"ERA001", # Found commented-out code
106+
"RET503", # Missing explicit `return` at the end of function able to return non-`None` value
107+
"F401", # `module` imported but unused
108+
"D103", # Missing docstring in public function
109+
"F403", # `from module import *` used; unable to detect undefined names
110+
"ANN401", # Dynamically typed expressions (typing.Any) are disallowed
111+
"ANN206", # Missing return type annotation for classmethod
112+
"ANN102", # Missing type annotation for `cls` in classmethod
113+
"D107", # Missing docstring in `__init__`
114+
"UP028", # Replace `yield` over `for` loop with `yield from`
115+
"B023", # Function definition does not bind loop variable
116+
"UP032", # Use f-string instead of `format` call
117+
"E741", # Ambiguous variable name
118+
"N803", # Argument name should be lowercase
119+
"ANN205", # Missing return type annotation for staticmethod
120+
"UP029", # Unnecessary builtin import
121+
"SIM105", # Use `contextlib.suppress(KeyError)` instead of `try`-`except`-`pass`
122+
"SIM118", # Use `key in dict` instead of `key in dict.keys()`
123+
"F811", # Redefinition of unused name
124+
"UP008", # Use `super()` instead of `super(__class__, self)`
125+
"D417", # Missing argument description in the docstring
126+
"SIM103", # Return the condition directly
127+
"D404", # First word of the docstring should not be "This"
128+
"NPY002", # Replace legacy `np.random.uniform` call with `np.random.Generator`
129+
]
83130

84131
[tool.ruff.lint.per-file-ignores]
85132
"__init__.py" = ["F401"]

tensorflow_transform/beam/combiner_packing_util.py

Lines changed: 14 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -120,8 +120,8 @@ def _maybe_add_packable_combine(self, operation_def, input_values):
120120
class _PackAccumulateCombineVisitor(_ValidationVisitor):
121121
r"""A visitor that packs combine nodes in the graph.
122122
123-
This visitor takes the grouped combines and performs the packing of those
124-
combines.
123+
This visitor takes the grouped combines and performs the packing of those
124+
combines.
125125
Before packing
126126
GrandParentNode
127127
/ \
@@ -136,9 +136,9 @@ class _PackAccumulateCombineVisitor(_ValidationVisitor):
136136
/ \
137137
ExtractFromDict1' ExtractFromDict2'
138138
139-
The ExtractFromDict nodes after packing extract the accumulator corresponding
140-
to the individual combines.
141-
"""
139+
The ExtractFromDict nodes after packing extract the accumulator corresponding
140+
to the individual combines.
141+
"""
142142

143143
def __init__(self, packable_combines):
144144
super().__init__()
@@ -246,8 +246,8 @@ def _maybe_add_packable_combine(self, operation_def, input_values):
246246
class _PackMergeCombineVisitor(_ValidationVisitor):
247247
r"""A visitor that inspects the graph and looks for combine nodes.
248248
249-
This visitor takes the grouped combines and performs the packing of those
250-
combines.
249+
This visitor takes the grouped combines and performs the packing of those
250+
combines.
251251
Before packing
252252
... ...
253253
/ \
@@ -270,13 +270,13 @@ class _PackMergeCombineVisitor(_ValidationVisitor):
270270
/ \
271271
ExtractPackedCombineMergeOutputs1 ExtractPackedCombineMergeOutputs2
272272
273-
Since the inputs to the final flatten node before the packed merge come from
274-
different paths, we add redundant flatten and packed merge nodes each time we
275-
visit a new input of the final flatten node. At the end of this traversal,
276-
we would have one final packed merge node with a corresponding flatten node
277-
having all the needed inputs, and in addition to this we would have a set of
278-
redundant packed merge and flatten nodes which need to be removed.
279-
"""
273+
Since the inputs to the final flatten node before the packed merge come from
274+
different paths, we add redundant flatten and packed merge nodes each time we
275+
visit a new input of the final flatten node. At the end of this traversal,
276+
we would have one final packed merge node with a corresponding flatten node
277+
having all the needed inputs, and in addition to this we would have a set of
278+
redundant packed merge and flatten nodes which need to be removed.
279+
"""
280280

281281
def __init__(self, packable_combine_extract_outputs):
282282
super().__init__()

tensorflow_transform/beam/impl.py

Lines changed: 2 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1181,9 +1181,8 @@ def expand(self, dataset):
11811181

11821182
if graph.get_collection(tf.compat.v1.GraphKeys.TRAINABLE_VARIABLES):
11831183
raise ValueError(
1184-
"The preprocessing function contained trainable variables " "{}".format(
1185-
graph.get_collection_ref(tf.compat.v1.GraphKeys.TRAINABLE_VARIABLES)
1186-
)
1184+
"The preprocessing function contained trainable variables "
1185+
f"{graph.get_collection_ref(tf.compat.v1.GraphKeys.TRAINABLE_VARIABLES)}"
11871186
)
11881187

11891188
pipeline = (

0 commit comments

Comments
 (0)