@juanmc2005
Last active December 24, 2016 02:53
Perceptron in Haskell
module Perceptron (Perceptron, Activation (..), newPerceptron, predict, train, weights, activation) where
-- A Perceptron implementation
type Weights = [Float]
type Inputs = [Float]
type LearningRate = Float
type Epochs = Int
type ErrorValue = Float
data Activation = Threshold | Sigmoid | HyperbolicTangent deriving Show
data Perceptron = Perceptron Weights Activation deriving Show
newPerceptron :: Int -> Activation -> Perceptron
-- Weights initialized to 0
newPerceptron n a = Perceptron (replicate n 0.0) a
evalA :: Activation -> Float -> Float
evalA Threshold x = if x < 0 then 0 else 1
evalA Sigmoid x = 1 / (1 + exp (-x))
evalA HyperbolicTangent x = (exp n - 1) / (exp n + 1)
  where n = 2 * x
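-- Illustrative values: evalA Threshold (-0.5) == 0.0, evalA Sigmoid 0 == 0.5,
-- evalA HyperbolicTangent 0 == 0.0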
weightedSum :: Inputs -> Weights -> Float
-- Dot product of inputs and weights
-- Precondition: length inputs == length weights
weightedSum [] _ = 0.0
weightedSum (x:xs) (w:ws) = x * w + weightedSum xs ws
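-- e.g. weightedSum [1, 2] [0.5, 0.25] == 1.0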
adjustedWeights :: ErrorValue -> Inputs -> LearningRate -> Weights -> Weights
-- Precondition: length inputs == length weights
adjustedWeights _ _ _ [] = []
adjustedWeights e (x:xs) eta (w:ws) = (w + eta * e * x):(adjustedWeights e xs eta ws)
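-- Each weight moves by eta * error * its input (the perceptron delta rule),
-- e.g. adjustedWeights 1.0 [1, 0] 0.1 [0, 0] == [0.1, 0.0]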
predict :: Inputs -> Perceptron -> Float
predict xs (Perceptron ws a) = innerPredict xs ws a
innerPredict :: Inputs -> Weights -> Activation -> Float
innerPredict [] _ _ = error "No inputs to predict"
innerPredict xs ws a = evalA a (weightedSum xs ws)
trainSingle :: Inputs -> Float -> LearningRate -> Perceptron -> Perceptron
-- Adjust the weights using the prediction error for a single sample
trainSingle xs y eta (Perceptron ws a) =
    Perceptron (adjustedWeights err xs eta ws) a
  where err = y - innerPredict xs ws a
train' :: [Inputs] -> [Float] -> LearningRate -> Perceptron -> [Perceptron]
-- One pass over the training set, returning the perceptron after each sample
-- Precondition: length xss == length ys
train' [] _ _ _ = []
train' (xs:xss) (y:ys) eta p = trainedP : train' xss ys eta trainedP
  where trainedP = trainSingle xs y eta p
trainOnce :: [Inputs] -> [Float] -> LearningRate -> Perceptron -> Perceptron
-- One full epoch: the perceptron after training on every sample once
trainOnce xss ys eta p = last (train' xss ys eta p)
train :: Epochs -> [Inputs] -> [Float] -> LearningRate -> Perceptron -> Perceptron
-- Train for n epochs over the whole training set
train n xss ys eta p = last (trainN n xss ys eta p)
trainN :: Epochs -> [Inputs] -> [Float] -> LearningRate -> Perceptron -> [Perceptron]
trainN 0 _ _ _ _ = []
trainN n xss ys eta p = fullyTrainedP : trainN (n - 1) xss ys eta fullyTrainedP
  where fullyTrainedP = trainOnce xss ys eta p
weights :: Perceptron -> Weights
weights (Perceptron ws _) = ws
activation :: Perceptron -> Activation
activation (Perceptron _ a) = a
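A minimal usage sketch, assuming the module above is saved as Perceptron.hs: the Main module below trains a perceptron with the Threshold activation on the OR truth table. A constant 1.0 is appended to each input to stand in for a bias, since the perceptron stores no separate bias weight. The data, learning rate, and epoch count here are illustrative.
module Main where
import Perceptron
-- OR truth table, with a constant 1.0 appended to each input as a bias feature
samples :: [[Float]]
samples = [[0, 0, 1], [0, 1, 1], [1, 0, 1], [1, 1, 1]]
targets :: [Float]
targets = [0, 1, 1, 1]
main :: IO ()
main = do
  let p = train 10 samples targets 0.1 (newPerceptron 3 Threshold)
  putStrLn ("Weights: " ++ show (weights p))
  mapM_ (\xs -> putStrLn (show xs ++ " -> " ++ show (predict xs p))) samples
With these settings the weights should settle around [0.1, 0.1, -0.1] within a few epochs, after which all four OR cases are classified correctly.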