From 43c953e23150ee1734ce8ca644dc93cfce3bd478 Mon Sep 17 00:00:00 2001
From: Jong Wook Kim
Date: Mon, 22 Mar 2021 18:07:08 -0400
Subject: [PATCH 1/2] Correctly initializing the logit scale parameter

cf. #46
---
 clip/model.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/clip/model.py b/clip/model.py
index 1ddd908..2a09245 100644
--- a/clip/model.py
+++ b/clip/model.py
@@ -287,7 +287,7 @@ class CLIP(nn.Module):
         self.ln_final = LayerNorm(transformer_width)
 
         self.text_projection = nn.Parameter(torch.empty(transformer_width, embed_dim))
-        self.logit_scale = nn.Parameter(torch.ones([]))
+        self.logit_scale = nn.Parameter(torch.FloatTensor([np.log(1/0.07)]))
 
         self.initialize_parameters()
 

From 290ac5cb1558745e56ea8cbcfcf8a9b8ba37182c Mon Sep 17 00:00:00 2001
From: Jong Wook Kim
Date: Mon, 22 Mar 2021 22:09:57 -0400
Subject: [PATCH 2/2] Correctly initializing the logit scale parameter

adding numpy import
---
 clip/model.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/clip/model.py b/clip/model.py
index 2a09245..9cf262a 100644
--- a/clip/model.py
+++ b/clip/model.py
@@ -1,6 +1,7 @@
 from collections import OrderedDict
 from typing import Tuple, Union
 
+import numpy as np
 import torch
 import torch.nn.functional as F
 from torch import nn
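
For context, a minimal sketch of why log(1/0.07) is the initialization value: logit_scale stores the log of the inverse temperature, and its exponential multiplies the cosine similarities between image and text embeddings, so exp(log(1/0.07)) = 1/0.07 (about 14.3) at the start of training. The snippet below is an illustrative, self-contained example, not the repository's actual forward(); the feature tensors and their shapes are hypothetical.

    import numpy as np
    import torch
    import torch.nn.functional as F
    from torch import nn

    # Same initialization as the patch: the parameter holds log(1/0.07).
    logit_scale = nn.Parameter(torch.FloatTensor([np.log(1 / 0.07)]))

    # Hypothetical L2-normalized image/text embeddings (batch of 4, width 512).
    image_features = F.normalize(torch.randn(4, 512), dim=-1)
    text_features = F.normalize(torch.randn(4, 512), dim=-1)

    # exp() recovers the scale 1/0.07 that multiplies the cosine similarities.
    logits_per_image = logit_scale.exp() * image_features @ text_features.t()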