Add contrastive learning models
This commit is contained in:
parent
7c9e75030b
commit
d9d350e191
@ -259,3 +259,39 @@ class CLUBSample(nn.Module): # Sampled version of the CLUB estimator
|
|||||||
def forward(self, x_samples, y_samples):
|
def forward(self, x_samples, y_samples):
|
||||||
mu, logvar = self.get_mu_logvar(x_samples)
|
mu, logvar = self.get_mu_logvar(x_samples)
|
||||||
return - self.loglikeli(x_samples, y_samples)
|
return - self.loglikeli(x_samples, y_samples)
|
||||||
|
|
||||||
|
|
||||||
|
class ProjectionHead(nn.Module):
    """MLP that projects a concatenated (state, action) pair into a shared
    latent space for contrastive learning.

    Args:
        state_size: dimensionality of the state vector.
        action_size: dimensionality of the action vector.
        hidden_size: width of the hidden layer and of the output embedding.
    """

    def __init__(self, state_size, action_size, hidden_size):
        # Zero-arg super() — Python 3 idiom, identical behavior to
        # super(ProjectionHead, self).__init__().
        super().__init__()
        self.state_size = state_size
        self.action_size = action_size
        self.hidden_size = hidden_size

        # Two-layer MLP. The trailing LayerNorm (with no final activation)
        # normalizes the embedding scale before any downstream similarity
        # scoring.
        self.projection_model = nn.Sequential(
            nn.Linear(state_size + action_size, hidden_size),
            nn.LayerNorm(hidden_size),
            nn.ReLU(),
            nn.Linear(hidden_size, hidden_size),
            nn.LayerNorm(hidden_size),
        )

    def forward(self, state, action):
        """Return the joint embedding of ``state`` and ``action``.

        Args:
            state: tensor of shape (..., state_size).
            action: tensor of shape (..., action_size).

        Returns:
            Tensor of shape (..., hidden_size).
        """
        # Concatenate along the feature axis so leading (batch) dims pass
        # through unchanged.
        x = torch.cat([state, action], dim=-1)
        return self.projection_model(x)
|
||||||
|
|
||||||
|
|
||||||
|
class ContrastiveHead(nn.Module):
    """Bilinear contrastive scoring head (CURL-style).

    Computes ``logits[i, j] = z_a[i]^T W z_pos[j]`` with a learned matrix
    ``W``, then subtracts each row's maximum for numerical stability, so the
    diagonal holds the positive-pair scores for an InfoNCE-style loss.

    Args:
        hidden_size: dimensionality of the input embeddings.
        temperature: multiplicative scale applied to the stabilized logits.
            NOTE(review): the logits are *multiplied* by temperature here,
            whereas many InfoNCE implementations divide — confirm intended.
    """

    def __init__(self, hidden_size, temperature=1):
        # Zero-arg super() — Python 3 idiom, identical behavior to
        # super(ContrastiveHead, self).__init__().
        super().__init__()
        self.hidden_size = hidden_size
        self.temperature = temperature
        # Learned bilinear weight, uniform [0, 1) initialization.
        self.W = nn.Parameter(torch.rand(self.hidden_size, self.hidden_size))

    def forward(self, z_a, z_pos):
        """Score every anchor embedding against every positive embedding.

        Args:
            z_a: anchor embeddings, shape (B, hidden_size).
            z_pos: positive embeddings, shape (B, hidden_size).

        Returns:
            (B, B) tensor of logits; entry (i, j) scores anchor i against
            positive j. Each row's maximum is 0 after stabilization (before
            the temperature scaling).
        """
        Wz = torch.matmul(self.W, z_pos.T)  # (hidden_size, B)
        logits = torch.matmul(z_a, Wz)      # (B, B)
        # Subtract the row-wise max so a downstream softmax/cross-entropy
        # cannot overflow.
        logits = logits - torch.max(logits, 1)[0][:, None]
        return logits * self.temperature
|
Loading…
Reference in New Issue
Block a user