Implementing a neural network with only the Python standard library
# Network definition and training script (uses neuron.py below).
import random
import itertools

import neuron
from IPython import embed


class NeuralNetwork():
    def __init__(self, layers, learn_rate):
        self.learn_rate = learn_rate
        self.neurons = self.init_neurons(layers)
        self.init_synapse()

    def init_neurons(self, layers):
        # Build one list of neurons per layer: input, hidden ("medium"), output.
        neurons = []
        for idx, num in enumerate(layers):
            if idx == 0:
                neurons.append([neuron.InputNeuron(self.learn_rate) for i in range(num)])
            elif idx == len(layers) - 1:
                neurons.append([neuron.OutputNeuron(self.learn_rate) for i in range(num)])
            else:
                neurons.append([neuron.MediumNeuron(self.learn_rate) for i in range(num)])
        return neurons

    def init_synapse(self):
        # Fully connect each layer to the next with weighted synapses.
        for i in range(len(self.neurons) - 1):
            for pair in itertools.product(self.neurons[i], self.neurons[i + 1]):
                synapse = neuron.Synapse(pair[0], pair[1])
                pair[0].dst.append(synapse)
                pair[1].src.append(synapse)

    def fit(self, input_signals, supervisor_signals):
        # Forward pass: set the input layer, then propagate layer by layer.
        for input_neuron, input_signal in zip(self.neurons[0], input_signals):
            input_neuron.set_value(input_signal)
        for layer in self.neurons[1:]:
            for n in layer:
                n.set_value()
        # Backward pass: output layer first, then hidden layers in reverse order.
        for output_neuron, supervisor_signal in zip(self.neurons[-1], supervisor_signals):
            output_neuron.fit(supervisor_signal)
        for layer in self.neurons[-2:0:-1]:
            for n in layer:
                n.fit()

    def classify(self, input_signals):
        # Forward pass only; returns the output-layer activations.
        for input_neuron, input_signal in zip(self.neurons[0], input_signals):
            input_neuron.set_value(input_signal)
        for layer in self.neurons[1:]:
            for n in layer:
                n.set_value()
        return [on.value() for on in self.neurons[-1]]


if __name__ == '__main__':
    # The tuple length is the number of layers; each element is that layer's
    # neuron count, from the input layer (left) to the output layer (right).
    layer_setting = (2, 4, 2)
    # Training pairs: the first output neuron learns AND, the second learns XOR
    # (the targets are the carry and sum bits of a half adder).
    xor_signal = [
        ((0, 0), (0, 0)),
        ((0, 1), (0, 1)),
        ((1, 0), (0, 1)),
        ((1, 1), (1, 0)),
    ]
    nn = NeuralNetwork(layer_setting, 0.25)
    for i in range(400000):
        signals = random.choice(xor_signal)
        nn.fit(signals[0], signals[1])
    embed()
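    # Optional sanity check (a minimal sketch, not part of the training loop
    # above): query the trained network with classify() instead of inspecting
    # it in the IPython shell. It reuses the nn and xor_signal names defined
    # above; the 0.5 rounding threshold is an arbitrary choice for display.
    for inputs, targets in xor_signal:
        outputs = nn.classify(inputs)
        rounded = [1 if o > 0.5 else 0 for o in outputs]
        print(inputs, '->', [round(o, 3) for o in outputs],
              'expected', targets, 'rounded', rounded)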
# neuron.py
import random
import math
import functools


class Synapse():
    def __init__(self, src_neuron, dst_neuron):
        self.src = src_neuron
        self.dst = dst_neuron
        # Small random initial weight in [-0.1, 0.1).
        self.weight = (random.random() * 0.2) - 0.1


def sigmoid(x):
    return 1 / (1 + math.exp(-x))


class BaseNeuron():
    def __init__(self, learn_rate):
        self._value = 0
        self.learn_rate = learn_rate
        self._epsilon = 0

    def set_value(self):
        pass

    def value(self):
        return self._value

    def epsilon(self):
        return self._epsilon


class InputNeuron(BaseNeuron):
    def __init__(self, learn_rate):
        super().__init__(learn_rate)
        self.dst = []

    def set_value(self, value):
        self._value = value


class MediumNeuron(BaseNeuron):
    def __init__(self, learn_rate):
        super().__init__(learn_rate)
        self.src = []
        self.dst = []

    def set_value(self):
        # Weighted sum of upstream activations, squashed by the sigmoid.
        in_signal = functools.reduce(
            lambda x, y: x + y, [s.weight * s.src.value() for s in self.src]
        )
        self._value = sigmoid(in_signal)

    def fit(self):
        # Back-propagated error: downstream deltas weighted by the outgoing
        # synapse weights, times the sigmoid derivative value * (1 - value).
        err = sum([s.weight * s.dst.epsilon() for s in self.dst])
        self._epsilon = err * self._value * (1 - self._value)
        for s in self.src:
            s.weight -= self.learn_rate * self._epsilon * s.src.value()


class OutputNeuron(BaseNeuron):
    def __init__(self, learn_rate):
        super().__init__(learn_rate)
        self.src = []

    def set_value(self):
        in_signal = functools.reduce(
            lambda x, y: x + y, [s.weight * s.src.value() for s in self.src]
        )
        self._value = sigmoid(in_signal)

    def fit(self, supervisor_signal):
        # Output-layer delta from the squared-error gradient, times the
        # sigmoid derivative; then gradient-descent update of incoming weights.
        err = (self._value - supervisor_signal)
        self._epsilon = err * self._value * (1 - self._value)
        for s in self.src:
            s.weight -= self.learn_rate * self._epsilon * s.src.value()
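As a quick reference, the fit() methods above follow the standard delta rule for sigmoid units trained on a squared-error loss. Writing $y$ for a neuron's activation, $t$ for an output neuron's target, $\eta$ for the learning rate, and $w_{jk}$ for the weight of the synapse from neuron $j$ to neuron $k$, the updates correspond to

\[
\delta_k = (y_k - t_k)\, y_k (1 - y_k), \qquad
\delta_j = \Big(\sum_k w_{jk}\, \delta_k\Big)\, y_j (1 - y_j), \qquad
w_{jk} \leftarrow w_{jk} - \eta\, \delta_k\, y_j,
\]

where $\delta$ is the quantity the code stores as _epsilon and $y(1-y)$ is the derivative of the sigmoid computed in set_value(). Note that the network has no bias terms.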