Skip to content

Instantly share code, notes, and snippets.

@Lambdanaut
Created March 10, 2014 21:53
Show Gist options
  • Select an option

  • Save Lambdanaut/9475256 to your computer and use it in GitHub Desktop.
Artificial Neural Network
module Main where
-- Network topology configuration: sizes of the input, hidden, and
-- output layers, and the number of hidden layers.

config_input_cells :: Int
config_input_cells = 1

config_hidden_layer_cells :: Int
config_hidden_layer_cells = 10

config_hidden_layers :: Int
config_hidden_layers = 2

config_output_cells :: Int
config_output_cells = 1
-- | A network is a list of layers; each layer is a list of neurons,
-- and each neuron is a pair of (Threshold, [Connection Weights]),
-- one weight per neuron in the previous layer.
type Net = [ [ (Double, [Double] ) ] ]

-- | The initial network: one input layer, 'config_hidden_layers' hidden
-- layers, and one output layer.  Every threshold starts at 0.0 and
-- every connection weight at 0.5.
net :: Net
net = [input_layer] ++ hidden_layers ++ [output_layer]
  where
    -- Input layer: one neuron per input value, each weighted 0.5.
    input_layer = replicate config_input_cells (0.0, replicate config_input_cells 0.5)
    -- The first hidden layer is fed by the input layer, so its weight
    -- vectors have config_input_cells entries.
    first_hidden_layer = replicate config_hidden_layer_cells (0.0, replicate config_input_cells 0.5)
    -- Weight vector for a neuron fed by a full hidden layer.
    hidden_layer_cells = replicate config_hidden_layer_cells 0.5
    -- Remaining hidden layers connect hidden-to-hidden; `max _ 0`
    -- guards against a non-positive config_hidden_layers setting.
    hidden_layers =
      first_hidden_layer
        : replicate (max (config_hidden_layers - 1) 0)
            (replicate config_hidden_layer_cells (0.0, hidden_layer_cells))
    -- Output layer is fed by the last hidden layer.
    output_layer = replicate config_output_cells (0.0, hidden_layer_cells)
-- | Feed a signal through the network layer by layer, returning the
-- activations at every stage — the head of the result is the seed
-- input itself, and each later element is one layer's output.
propagate :: Net -> [Double] -> [[Double]]
propagate layers seed = scanl fireLayer seed layers
  where
    -- Activations of an entire layer, given the previous stage's outputs.
    fireLayer inputs = map (fires inputs)
    -- A neuron emits 1.0 when its weighted input sum exceeds its
    -- threshold, and 0.0 otherwise.
    fires inputs (threshold, weights)
      | threshold < sum (zipWith (*) inputs weights) = 1.0
      | otherwise                                    = 0.0
--fire :: [(Double, Double)] -> Double -> Bool
--fire inputs threshold = threshold < (sum $ map (\(value, weight) -> value * weight) inputs)
--input_layer :: [[Double]] -> [Double]
--input_layer net = head net
--output_layer :: [[Double]] -> [Double]
--output_layer net = last net
-- | Entry point; currently a no-op placeholder.
main :: IO ()
main = pure ()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment