Commit bd94e75 (1 parent: cbef352)
passl/modeling/backbones/clip.py
@@ -314,7 +314,8 @@ def encode_text(self, text):
         return x
 
     def clip_logit_scale(self):
-        self.logit_scale.clip(-4.6, 4.6)
+        logit_scale_buffer = self.logit_scale.clip_(-4.6, 4.6)
+        logit_scale_buffer._share_buffer_to(self.logit_scale)
 
     def forward(self, image, text, is_train=True):
         image_features = self.encode_image(image)
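Why this matters: Paddle's Tensor.clip() is out-of-place, so the old line computed a clamped copy of the logit scale and discarded it; the parameter was never actually bounded. The new code clamps with the in-place clip_() and then calls _share_buffer_to so that self.logit_scale ends up holding the clamped values (4.6 is roughly ln 100, the usual cap on the CLIP temperature). Below is a minimal sketch of the difference on a plain tensor; it is illustrative only, and the 10.0 starting value is an arbitrary assumption, while the real method operates on the model parameter exactly as shown in the diff.

# Minimal sketch: out-of-place clip() vs in-place clip_() in Paddle.
import paddle

logit_scale = paddle.to_tensor([10.0])

# Old code: clip() returns a clamped copy that is thrown away,
# so the tensor still holds 10.0 and exp(logit_scale) stays unbounded.
logit_scale.clip(-4.6, 4.6)
print(logit_scale.item())   # 10.0

# New code: clip_() clamps in place. On the real parameter the diff also
# calls _share_buffer_to(self.logit_scale) so the parameter is backed by
# the clamped storage.
logit_scale.clip_(-4.6, 4.6)
print(logit_scale.item())   # 4.6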