
Here is my version of an autoencoder written using PyTorch:

import warnings
warnings.filterwarnings('ignore')

import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
sns.set_style("darkgrid")
import torch

%matplotlib inline

# three 10-dimensional rows drawn from different uniform ranges
f = []
f.append(np.random.uniform(0, 10, (1, 10)).flatten())
f.append(np.random.uniform(10, 20, (1, 10)).flatten())
f.append(np.random.uniform(20, 30, (1, 10)).flatten())
x_data = torch.FloatTensor(np.array(f))  # shape: (3, 10)
x_data

dimensions_input = 10
hidden_layer_nodes = 5
output_dimension = 10

class Model(torch.nn.Module):
    def __init__(self):
        super(Model, self).__init__()
        # encoder: 10 -> 5
        self.linear = torch.nn.Linear(dimensions_input, hidden_layer_nodes)
        self.sigmoid = torch.nn.Sigmoid()
        # decoder: 5 -> 10
        self.linear2 = torch.nn.Linear(hidden_layer_nodes, output_dimension)

    def forward(self, x):
        l_out1 = self.linear(x)
        l_out2 = self.sigmoid(l_out1)  # hidden (encoded) representation
        y_pred = self.linear2(l_out2)  # reconstruction
        return y_pred

model = Model()

# size_average=False is deprecated; reduction='sum' is the equivalent
criterion = torch.nn.MSELoss(reduction='sum')
optim = torch.optim.SGD(model.parameters(), lr=0.00001)

def train_model():
    y_data = x_data.clone()  # the target is the input itself
    for i in range(150000):
        y_pred = model(x_data)
        loss = criterion(y_pred, y_data)

        if i % 5000 == 0:
            print(loss.item())

        optim.zero_grad()
        loss.backward()
        optim.step()

train_model()

Using `x_data.clone()` as the target, I train the network to learn a feature representation of the input data.

I'm attempting to generate hidden-layer weights that match the dimensionality of the rows of the input data, so that each vector of `x_data` has a corresponding encoding. But the hidden layer is a vector of size 5. How can I change this network so that it generates a matrix representing a reduced dimensionality of the input data?
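
To make the goal concrete, here is roughly the kind of thing I have in mind (the `encode` method and the `SketchModel` name below are hypothetical, not part of my model above):

# hypothetical variant of my Model: expose the hidden activations so that
# every row of x_data gets its own 5-dimensional encoding
class SketchModel(torch.nn.Module):
    def __init__(self):
        super(SketchModel, self).__init__()
        self.linear = torch.nn.Linear(dimensions_input, hidden_layer_nodes)
        self.sigmoid = torch.nn.Sigmoid()
        self.linear2 = torch.nn.Linear(hidden_layer_nodes, output_dimension)

    def encode(self, x):
        # one 5-dimensional encoding per input row -> shape (num_rows, 5)
        return self.sigmoid(self.linear(x))

    def forward(self, x):
        return self.linear2(self.encode(x))

# SketchModel().encode(x_data) returns a 3 x 5 tensor: one encoding per input row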

blue-sky
  • [Here](https://github.com/benmyara/pytorch-examples/blob/master/code/1_NeuralNetworks/3_autoencoder.py) you can find a minimalist example of an autoencoder in PyTorch. It uses the `nn.Sequential` interface, but this is pretty much the same (see the sketch after these comments). – BiBi Jul 24 '18 at 18:07
  • Your code does not run. I had to modify your function `train_model` to make it run: https://gist.github.com/benmyara/ebbae2b99abceee71457ae2107dfba6a – BiBi Jul 24 '18 at 18:15
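
For reference, an `nn.Sequential` version of the same model (a sketch based on the comment above, not copied from the linked example) would look roughly like this:

seq_model = torch.nn.Sequential(
    torch.nn.Linear(dimensions_input, hidden_layer_nodes),  # encoder: 10 -> 5
    torch.nn.Sigmoid(),
    torch.nn.Linear(hidden_layer_nodes, output_dimension),  # decoder: 5 -> 10
)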

0 Answers