Skip to content

Instantly share code, notes, and snippets.

@cedias
Created September 29, 2016 09:35
Show Gist options
  • Select an option

  • Save cedias/c2b88ad20dfa17378b74f32a5d22caa2 to your computer and use it in GitHub Desktop.

Select an option

Save cedias/c2b88ad20dfa17378b74f32a5d22caa2 to your computer and use it in GitHub Desktop.
bugged
from __future__ import print_function
from keras.layers.recurrent import Recurrent
from keras import backend as K
from keras.engine.topology import InputSpec
from keras import activations, initializations
from keras.models import Sequential
from keras.layers import Dense, Activation, Dropout, LSTM
from keras.optimizers import RMSprop
import numpy as np
import random
import sys
class CTXRNN(Recurrent):
    """Custom contextual recurrent layer (Keras 1.x `Recurrent` API).

    Holds two trainable weights:
      - REC: tensor of shape (output_dim, output_dim, context_num),
        contracted with the current input inside each step;
      - INP: input projection of shape (input_dim, output_dim).

    Per step (see `step`): h_t = x_t . INP + h_{t-1} . K.dot(REC, x_t).
    NOTE(review): the gist is titled "bugged" — the K.dot(REC, i)
    contraction only lines up when context_num matches the input
    dimension; confirm the intended tensor contraction before reuse.
    """

    def __init__(self, output_dim, context_num,
                 init='orthogonal', inner_init='orthogonal',
                 activation='relu', **kwargs):
        # output_dim: size of the hidden state h.
        # context_num: third dimension of the recurrent tensor REC.
        # init / inner_init: Keras-1 initialization names for INP / REC.
        self.output_dim = output_dim
        self.context_num = context_num
        self.init = initializations.get(init)
        self.inner_init = initializations.get(inner_init)
        self.activation = activations.get(activation)  # stored but unused in step
        super(CTXRNN, self).__init__(**kwargs)

    def build(self, input_shape):
        """Create weights; input_shape is (batch, timesteps, input_dim)."""
        self.input_spec = [InputSpec(shape=input_shape)]
        if self.stateful:
            self.reset_states()
        else:
            # initial states: all-zero tensor of shape (output_dim)
            self.states = [None]
        input_dim = input_shape[2]
        self.input_dim = input_dim
        self.REC = self.inner_init((self.output_dim, self.output_dim, self.context_num))
        self.INP = self.init((input_dim, self.output_dim))
        self.trainable_weights = [self.REC, self.INP]
        if self.initial_weights is not None:
            self.set_weights(self.initial_weights)
            del self.initial_weights

    def step(self, i, states):
        """One recurrence step: i is the input at time t, states = [h_{t-1}]."""
        hm1 = states[0]
        # Contract the 3-D recurrent tensor with the current input, then
        # combine the projected input with the recurrent contribution.
        rec = K.dot(self.REC, i)
        h = K.dot(i, self.INP) + K.dot(hm1, rec)
        return h, [h]

    def get_config(self):
        """Return the layer config (constructor kwargs) for serialization.

        Fix: the original also emitted "alpha": self.alpha, but no `alpha`
        attribute is ever set, so get_config() always raised AttributeError
        (breaking model.to_json()/save()). The key is dropped.
        """
        config = {"output_dim": self.output_dim,
                  "init": self.init.__name__,
                  "context_num": self.context_num,
                  "inner_init": self.inner_init.__name__,
                  "activation": self.activation.__name__}
        base_config = super(CTXRNN, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
#################################################################
# Data preparation: slice the toy text into fixed-length character
# windows (inputs) and record the character that follows each window
# (targets), then one-hot encode both.
maxlen = 5  # characters per input window
step = 1    # stride between consecutive windows
sentences = []   # input windows of length `maxlen`
next_chars = []  # character immediately following each window
text = "Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text Sample text !"
chars = sorted(set(text))  # vocabulary in a deterministic order
char_indices = dict((c, i) for i, c in enumerate(chars))
indices_char = dict((i, c) for i, c in enumerate(chars))
for i in range(0, len(text) - maxlen, step):
    sentences.append(text[i: i + maxlen])
    next_chars.append(text[i + maxlen])
# One-hot tensors. Fix: the original used dtype=np.bool, a deprecated
# alias for the builtin bool that was removed in NumPy 1.24, so the
# script crashes on any modern NumPy. The builtin bool is equivalent.
X = np.zeros((len(sentences), maxlen, len(chars)), dtype=bool)
y = np.zeros((len(sentences), len(chars)), dtype=bool)
for i, sentence in enumerate(sentences):
    for t, char in enumerate(sentence):
        X[i, t, char_indices[char]] = 1
    y[i, char_indices[next_chars[i]]] = 1
# Assemble the demo network: contextual RNN -> dense softmax over the
# vocabulary, then train one epoch on the one-hot data (Keras 1.x API:
# lr= and nb_epoch= are the era-correct keyword names).
model = Sequential()
for layer in (
    CTXRNN(64, context_num=len(chars), input_shape=(maxlen, len(chars))),
    Dense(len(chars)),
    Activation('softmax'),
):
    model.add(layer)
optimizer = RMSprop(lr=0.01)
model.compile(loss='categorical_crossentropy', optimizer=optimizer)
model.fit(X, y, batch_size=128, nb_epoch=1)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment