Max over time instead of sum of embeddings
import torch
import torch.nn as nn
from baseline.pytorch.torchy import classify_bt, append2seq
from baseline.model import Classifier


class NBowMaxModel(nn.Module, Classifier):
    """Neural bag-of-words classifier that pools word embeddings with an
    elementwise max over time rather than a sum."""

    def save(self, outname):
        print('saving %s' % outname)
        torch.save(self, outname)

    def create_loss(self):
        # The network ends in LogSoftmax, so NLL is the matching loss
        return nn.NLLLoss()

    def __init__(self):
        super(NBowMaxModel, self).__init__()

    @staticmethod
    def load(outname, **kwargs):
        model = torch.load(outname)
        return model

    @staticmethod
    def create(embeddings, labels, **kwargs):
        pdrop = kwargs.get('dropout', 0.5)
        nlayers = kwargs.get('layers', 1)
        hsz = kwargs['hsz']
        dsz = embeddings.dsz
        model = NBowMaxModel()
        model.labels = labels
        nc = len(labels)
        model.vocab = embeddings.vocab
        # Lookup table seeded from pretrained weights, fine-tuned during training
        model.lut = nn.Embedding(embeddings.vsz + 1, dsz)
        del model.lut.weight
        model.lut.weight = nn.Parameter(torch.FloatTensor(embeddings.weights),
                                        requires_grad=True)
        model.proj = nn.Linear(dsz, hsz)
        model.fconns = nn.Sequential()
        input_dim = hsz
        # Optional extra fully-connected hidden layers
        layers = []
        for i in range(nlayers):
            layers.append(nn.Linear(input_dim, input_dim))
            layers.append(nn.BatchNorm1d(input_dim))
            layers.append(nn.ReLU())
            layers.append(nn.Dropout(pdrop))
        # Final projection to the label space, emitting log-probabilities
        layers.append(nn.Linear(input_dim, nc))
        layers.append(nn.LogSoftmax(dim=1))
        append2seq(model.fconns, layers)
        return model

    def forward(self, input):
        # input is BxT word indices; the lookup yields BxTxC embeddings
        embeddings = self.lut(input)
        # Pool with an elementwise max over the time (T) dimension
        dmax, _ = torch.max(embeddings, 1, False)
        proj_out = self.proj(dmax)
        output = self.fconns(proj_out)
        return output

    def classify(self, batch_time):
        return classify_bt(self, batch_time)

    def get_labels(self):
        return self.labels

    def get_vocab(self):
        return self.vocab


def create_model(embeddings, labels, **kwargs):
    kwargs['hsz'] = kwargs.get('cmotsz', 100)
    return NBowMaxModel.create(embeddings, labels, **kwargs)
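For reference, here is a minimal, self-contained sketch in plain PyTorch (toy random tensors, no baseline dependency) contrasting the classic sum-of-embeddings pooling with the max-over-time pooling that forward() uses above.

import torch

# Toy batch: B=2 sentences, T=4 time steps, C=3 embedding dims
embeddings = torch.randn(2, 4, 3)

# Classic neural-bag-of-words pooling: sum the word vectors over time
summed = embeddings.sum(1)                   # B x C

# This gist's variant: elementwise max over time, as in forward() above
pooled, _ = torch.max(embeddings, 1, False)  # B x C

print(summed.size(), pooled.size())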
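And a hypothetical smoke test, assuming the file above is in scope and the baseline package it imports from is installed. FakeEmbeddings is an invented stand-in that stubs only the attributes create() actually reads (vsz, dsz, vocab, weights); it is not part of the baseline library.

import numpy as np
import torch

class FakeEmbeddings(object):
    # Hypothetical stub: provides only what NBowMaxModel.create() reads
    def __init__(self, vsz=100, dsz=8):
        self.vsz = vsz
        self.dsz = dsz
        self.vocab = {'<PAD>': 0}
        # vsz + 1 rows to match nn.Embedding(embeddings.vsz + 1, dsz)
        self.weights = np.random.uniform(-0.25, 0.25,
                                         (vsz + 1, dsz)).astype('float32')

model = create_model(FakeEmbeddings(), ['neg', 'pos'])
model.eval()  # BatchNorm1d needs eval mode (or batch size > 1) here
log_probs = model(torch.LongTensor([[3, 17, 42, 9]]))  # 1x4 word indices
print(log_probs.size())  # torch.Size([1, 2]): log-probs over the two labels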