Last active
August 22, 2017 17:44
-
-
Save dpressel/3691af49fd5c118ea724775dd970f059 to your computer and use it in GitHub Desktop.
Neural Bag of Words addon for baseline
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| import torch | |
| import torch.nn as nn | |
| from baseline.pytorch.torchy import classify_bt, append2seq | |
| from baseline.model import Classifier | |
class NBowModel(nn.Module, Classifier):
    """Neural Bag-of-Words (NBoW) text classifier for the `baseline` framework.

    The model looks up word embeddings for each token, sums them over the
    time dimension into a single bag-of-words vector, projects that vector
    to a hidden size, and feeds it through a stack of fully-connected
    layers ending in a log-softmax over the label set.

    Instances are built via the `create` staticmethod (not `__init__`
    directly), which attaches `labels`, `vocab`, `lut`, `proj` and
    `fconns` as attributes.
    """

    def save(self, outname):
        """Serialize the whole model object to `outname` via torch.save."""
        print('saving %s' % outname)
        torch.save(self, outname)

    def create_loss(self):
        """Return the training criterion.

        NLLLoss pairs with the LogSoftmax output layer built in `create`.
        """
        return nn.NLLLoss()

    def __init__(self):
        # Only nn.Module needs initialization; all model attributes are
        # attached later by `create`.
        super(NBowModel, self).__init__()

    @staticmethod
    def load(outname, **kwargs):
        """Deserialize a model previously written by `save`.

        NOTE(review): torch.load unpickles arbitrary objects — only load
        files from trusted sources.
        """
        model = torch.load(outname)
        return model

    @staticmethod
    def create(embeddings, labels, **kwargs):
        """Build an NBowModel.

        :param embeddings: embedding container; this code reads its
            `dsz` (embedding dim), `vsz` (vocab size), `vocab` and
            `weights` attributes.
        :param labels: label set; its length fixes the output dimension.
        :param kwargs: `hsz` (required hidden size), `unif` (required,
            currently unused here), optional `dropout` (default 0.5)
            and `layers` (default 1 hidden FC layer).
        :return: a ready-to-train NBowModel.
        """
        pdrop = kwargs.get('dropout', 0.5)
        nlayers = kwargs.get('layers', 1)
        hsz = kwargs['hsz']
        unif = kwargs['unif']
        dsz = embeddings.dsz
        model = NBowModel()
        model.labels = labels
        nc = len(labels)
        model.vocab = embeddings.vocab
        # +1 row presumably reserves an index for padding/OOV — the real
        # weights come from the pretrained matrix below.
        model.lut = nn.Embedding(embeddings.vsz + 1, dsz)
        del model.lut.weight
        model.lut.weight = nn.Parameter(torch.FloatTensor(embeddings.weights),
                                        requires_grad=True)
        model.proj = nn.Linear(dsz, hsz)
        model.fconns = nn.Sequential()
        input_dim = hsz
        # Optional additional hidden layers (Linear -> BatchNorm -> ReLU
        # -> Dropout), repeated `nlayers` times at constant width.
        layers = []
        for i in range(nlayers):
            layers.append(nn.Linear(input_dim, input_dim))
            layers.append(nn.BatchNorm1d(input_dim))
            layers.append(nn.ReLU())
            layers.append(nn.Dropout(pdrop))
        layers.append(nn.Linear(input_dim, nc))
        # FIX: specify dim explicitly — nn.LogSoftmax() with an implicit
        # dim is deprecated; for the (batch, classes) input produced here
        # the implicit dim is 1, so dim=1 preserves behavior exactly.
        layers.append(nn.LogSoftmax(dim=1))
        append2seq(model.fconns, layers)
        return model

    def forward(self, input):
        """Compute log-probabilities over labels.

        :param input: LongTensor of token ids, assumed (batch, time) —
            embeds to BxTxC per the original author's note.
        :return: (batch, num_labels) log-softmax scores.
        """
        # BxTxC
        embeddings = self.lut(input)
        # Bag-of-words: sum over the time axis, dropping it (keepdim=False).
        dsum = torch.sum(embeddings, 1, False)
        proj_out = self.proj(dsum)
        output = self.fconns(proj_out)
        return output

    def classify(self, batch_time):
        """Delegate batched classification to the framework helper."""
        return classify_bt(self, batch_time)

    def get_labels(self):
        """Return the label set attached at creation time."""
        return self.labels

    def get_vocab(self):
        """Return the vocabulary attached at creation time."""
        return self.vocab
def create_model(embeddings, labels, **kwargs):
    """Framework factory entry point for the NBoW addon.

    Maps the framework's `cmotsz` hyperparameter (default 100) onto the
    `hsz` hidden-size kwarg that NBowModel.create expects, then builds
    the model.

    :param embeddings: embedding container forwarded to NBowModel.create.
    :param labels: label set forwarded to NBowModel.create.
    :return: a constructed NBowModel.
    """
    # FIX: removed leftover debug print of the hidden size.
    kwargs['hsz'] = kwargs.get('cmotsz', 100)
    return NBowModel.create(embeddings, labels, **kwargs)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment