Implementing PyTorch functions: a manual log_softmax and NLL loss
# Manual log_softmax over the rows of a 2-D Variable x (batch x classes),
# written against the early-2017 PyTorch Variable API. x is assumed to be
# defined elsewhere as a Variable of scores.
import torch
from torch.autograd import Variable

max_val, max_idx = x.max(1)              # row-wise max, for numerical stability
max_val = max_val.expand_as(x)
x_exp = (x - max_val).exp()
p = Variable(torch.zeros(x_exp.size()))
for i in range(x_exp.size(0)):           # normalise each row into probabilities
    row_sum = x_exp[i, :].sum().data[0]
    p[i, :] = x_exp[i, :] / row_sum
out = p.log()                            # log-softmax of x
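
### Sanity check (a sketch, not part of the original gist): the manual result
### should agree with the built-in F.log_softmax on the same input, up to
### floating-point rounding.
import torch.nn.functional as F

diff = (out - F.log_softmax(x)).abs().max().data[0]
print('max abs difference vs F.log_softmax:', diff)   # expected to be tiny (~1e-6 or less)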
### Time test over 100 iterations (elapsed seconds)
# backend F.log_softmax: 0.00801706314086914
# my own implementation: 0.6033849716186523
########## Time test code
import time

tt = time.time()
for it in range(100):
    y = F.log_softmax(x)
print('default function:', time.time() - tt)

tt = time.time()
for it in range(100):
    max_val, max_idx = x.max(1)
    max_val = max_val.expand_as(x)
    y = (x - max_val).exp()
    p = Variable(torch.zeros(y.size()))
    for i in range(y.size(0)):           # per-row normalisation (the slow part)
        row_sum = y[i, :].sum().data[0]
        p[i, :] = y[i, :] / row_sum
    p = p.log()
print('my own:', time.time() - tt)
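
### Vectorized variant (a sketch, not part of the original gist): the per-row
### Python loop is what makes the manual version slow; a row-wise sum plus
### expand_as removes it. Assumes x is the same 2-D Variable as above.
max_val, _ = x.max(1)
shifted = x - max_val.expand_as(x)
log_norm = shifted.exp().sum(1).log().expand_as(x)   # sum(1) keeps the reduced dim in this PyTorch version
out_vec = shifted - log_norm                         # equals log_softmax(x), no Python loop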
# Manual negative log-likelihood (cross-entropy) loss from the log-softmax output.
# log_softmax: (batch x classes) Variable; target: Variable of class indices.
loss_ = 0
for i in range(log_softmax.size()[0]):
    loss_ -= log_softmax[i, target[i].data[0]]   # log-prob of the true class
loss_ /= log_softmax.size()[0]                   # average over the batch
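
### Vectorized loss (a sketch, not part of the original gist): gather picks the
### log-probability of the target class for every row at once. Assumes target
### holds LongTensor class indices of shape (batch,).
loss_vec = -log_softmax.gather(1, target.view(-1, 1)).mean()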