Fix bug in casting variables when cuda is enabled
GabrielBianconi committed Nov 1, 2017
1 parent bc2f7f1 commit e5e872d
Showing 1 changed file with 2 additions and 2 deletions.
rbm.py: 4 changes (2 additions & 2 deletions)
@@ -42,7 +42,7 @@ def sample_visible(self, hidden_probabilities):
     def contrastive_divergence(self, input_data):
         # Positive phase
         positive_hidden_probabilities = self.sample_hidden(input_data)
-        positive_hidden_activations = (positive_hidden_probabilities >= self._random_probabilities(self.num_hidden)).type(torch.FloatTensor)
+        positive_hidden_activations = (positive_hidden_probabilities >= self._random_probabilities(self.num_hidden)).float()
         positive_associations = torch.matmul(input_data.t(), positive_hidden_activations)

         # Negative phase
@@ -51,7 +51,7 @@ def contrastive_divergence(self, input_data):
         for step in range(self.k):
             visible_probabilities = self.sample_visible(hidden_activations)
             hidden_probabilities = self.sample_hidden(visible_probabilities)
-            hidden_activations = (hidden_probabilities >= self._random_probabilities(self.num_hidden)).type(torch.FloatTensor)
+            hidden_activations = (hidden_probabilities >= self._random_probabilities(self.num_hidden)).float()

         negative_visible_probabilities = visible_probabilities
         negative_hidden_probabilities = hidden_probabilities
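The change replaces .type(torch.FloatTensor) with .float(). Tensor.type(torch.FloatTensor) always converts to the CPU tensor type, so when CUDA is enabled the sampled activations were silently moved off the GPU and later operations such as torch.matmul against CUDA tensors would fail with a device mismatch. Tensor.float() casts only the dtype and leaves the tensor on whatever device it already occupies. A minimal sketch of the difference, not part of the commit, written against the current PyTorch API (the torch.device idiom postdates this 2017 code):

import torch

# Use the GPU when available; fall back to CPU otherwise.
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

probabilities = torch.rand(8, device=device)
random_probabilities = torch.rand(8, device=device)

# Old behavior: .type(torch.FloatTensor) targets the CPU tensor type,
# so the result lands on the CPU even when the inputs live on the GPU.
cpu_activations = (probabilities >= random_probabilities).type(torch.FloatTensor)
print(cpu_activations.device)   # cpu, regardless of `device`

# Fixed behavior: .float() changes only the dtype and keeps the device,
# so downstream matmuls against CUDA tensors keep working.
activations = (probabilities >= random_probabilities).float()
print(activations.device)       # cuda:0 when CUDA is enabled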
