
Commit 29e4a4b

aliafzal authored and facebook-github-bot committed

Reverting "Obey training.planner.log_plan flag on if to log sharding plan" (#3279)
Summary: Reverting non-standard logging change.

Pull Request resolved: #3279
Reviewed By: TroyGarden
Differential Revision: D80168907
fbshipit-source-id: ec138da1e81d8318b5ddbc27c61d6e00d6aa1351
1 parent: 9e0b90f

File tree: 1 file changed (+0, −5 lines)


torchrec/distributed/planner/planners.py

Lines changed: 0 additions & 5 deletions
@@ -513,10 +513,7 @@ def plan(
             sharding_plan = to_sharding_plan(best_plan, self._topology)
 
             end_time = perf_counter()
-            shall_log_sharding_plan = False
             for stats in self._stats:
-                if not isinstance(stats, NoopEmbeddingStats):
-                    shall_log_sharding_plan = True
                 stats.log(
                     sharding_plan=sharding_plan,
                     topology=self._topology,
@@ -531,8 +528,6 @@ def plan(
                     sharders=sharders,
                     debug=self._debug,
                 )
-            if shall_log_sharding_plan:
-                logger.info(f"Found sharding plan {sharding_plan}")
             return sharding_plan
         else:
             global_storage_capacity = reduce(
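
For context, below is a minimal runnable sketch of the control flow this commit reverts. EmbeddingStats and the bodies of its methods are simplified stand-ins (the real torchrec stats.log() takes many more keyword arguments, such as topology, sharders, and debug); only the branching logic is taken from the diff above.

import logging

logger = logging.getLogger(__name__)

class EmbeddingStats:
    # Stand-in for torchrec's stats interface; the real log() takes
    # many more keyword arguments (topology, sharders, debug, ...).
    def log(self, sharding_plan):
        logger.info("stats for %s", sharding_plan)

class NoopEmbeddingStats(EmbeddingStats):
    # Deliberately silent stats implementation.
    def log(self, sharding_plan):
        pass

def plan_before_revert(stats_list, sharding_plan):
    # Behavior being reverted: log the plan only when at least one
    # non-noop stats object is registered.
    shall_log_sharding_plan = False
    for stats in stats_list:
        if not isinstance(stats, NoopEmbeddingStats):
            shall_log_sharding_plan = True
        stats.log(sharding_plan=sharding_plan)
    if shall_log_sharding_plan:
        logger.info(f"Found sharding plan {sharding_plan}")
    return sharding_plan

def plan_after_revert(stats_list, sharding_plan):
    # Post-revert behavior: each stats object logs itself; the extra
    # "Found sharding plan" line is gone.
    for stats in stats_list:
        stats.log(sharding_plan=sharding_plan)
    return sharding_plan

After the revert, plan() simply logs through each stats object and returns the plan, as in plan_after_revert above.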
