From 93d954aa73447e67da164ff596358bc655932b8f Mon Sep 17 00:00:00 2001
From: "A. Unique TensorFlower"
Date: Wed, 8 Nov 2023 07:21:34 -0800
Subject: [PATCH] Add a backend optimizer for adafactor.

PiperOrigin-RevId: 580526748
---
 tf_keras/optimizers/adafactor.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tf_keras/optimizers/adafactor.py b/tf_keras/optimizers/adafactor.py
index b66c6c5e4..37666e719 100644
--- a/tf_keras/optimizers/adafactor.py
+++ b/tf_keras/optimizers/adafactor.py
@@ -40,7 +40,7 @@ class Adafactor(optimizer.Optimizer):
     The default argument setup is based on the original paper (see reference).
     When gradients are of dimension > 2, Adafactor optimizer will delete the
     last 2 dimensions separately in its accumulator variables.
-    
+
     Args:
         learning_rate: Initial value for the learning rate:
             either a floating point value,
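
Note (not part of the patch): the docstring lines touched above belong to the public Adafactor optimizer in TF-Keras. The snippet below is a minimal usage sketch, assuming a TensorFlow build where tf.keras.optimizers.Adafactor is available (TensorFlow 2.11 or newer); the learning-rate value is illustrative and is not taken from this patch.

    import tensorflow as tf

    # Sketch only: assumes TF >= 2.11 so tf.keras.optimizers.Adafactor exists.
    # The learning_rate value here is illustrative.
    optimizer = tf.keras.optimizers.Adafactor(learning_rate=1e-3)

    # Compile a toy model with the optimizer. Adafactor keeps factored
    # second-moment accumulators, which is why the docstring above discusses
    # how gradients with more than 2 dimensions are handled.
    model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(4,))])
    model.compile(optimizer=optimizer, loss="mse")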