# This code implements a simple perceptron neural network.

import random, string

def dotProduct(a, b):
    """Return the dot product of two equal-length numeric sequences.

    The 0.0 start value keeps the result a float even for empty inputs,
    matching the original accumulator initialization.
    """
    return sum((x * y for x, y in zip(a, b)), 0.0)

def pretty(values):
    """Return the values formatted to three decimals, space-separated.

    Uses str.join instead of the Python-2-only string.join (the string
    module's join function was removed in Python 3); the output is
    identical: items joined by a single space.
    """
    return ' '.join('%.3f' % v for v in values)

#--------------------------------------------------------------------------------

class Unit:
    """One processing unit: holds an activation level plus the lists of
    connections feeding into it and leading out of it."""

    def __init__(self, activation=0.0):
        """Create a unit with the given starting activation and no links."""
        self.activation = activation
        self.incomingConnections = []
        self.outgoingConnections = []

    def update(self):
        """Recompute this unit's activation from its incoming connections.

        The net input is the weighted sum of the source units' activations,
        which is then passed through the threshold function.
        """
        net = 0.0
        for conn in self.incomingConnections:
            net += conn.weight * conn.fromUnit.activation
        self.activation = self.stepFunction(net)

    def stepFunction(self, x):
        """Threshold activation: 1 for strictly positive input, else 0."""
        return 1 if x > 0 else 0

#--------------------------------------------------------------------------------

class Connection:
    """A weighted, directed link from one unit to another."""

    def __init__(self, fromUnit, toUnit):
        """Record the two endpoints and start with a small random weight."""
        self.toUnit = toUnit
        self.fromUnit = fromUnit
        self.randomize()

    def randomize(self):
        """Reset the weight to a random value drawn from [-0.1, 0.1]."""
        self.weight = random.uniform(-0.1, 0.1)

#--------------------------------------------------------------------------------

class SimplePerceptron:
    """A single-layer perceptron: a layer of input units fully connected
    (plus a bias unit fixed at activation 1.0) to one threshold output unit,
    trained with the classic perceptron learning rule.

    Callers are expected to assign `self.inputs` (list of input patterns)
    and `self.targets` (matching list of 0/1 targets) before calling
    test(), computeError(), teachDataset(), or train().

    Output statements are written as print(single_string) calls and
    teachDataset materializes zip() into a list, so the class runs
    unchanged under both Python 2 and Python 3 (the original print
    statements are a syntax error on Python 3, and shuffling a Python 3
    zip iterator raises TypeError).
    """

    def __init__(self, numInputs):
        """Build the network: numInputs input units, one output unit,
        and a bias unit, all wired to the output."""
        self.outputUnit = Unit()
        self.inputLayer = [Unit() for k in range(numInputs)]
        # wire every input unit to the output unit
        self.allConnections = []
        for unit in self.inputLayer:
            self.connect(unit, self.outputUnit)
        # bias unit: constant activation 1.0, so its weight acts as a threshold
        biasUnit = Unit(1.0)
        self.connect(biasUnit, self.outputUnit)
        self.learningRate = 0.1

    def connect(self, fromUnit, toUnit):
        """Create a Connection between two units and register it on both
        endpoints and in the network-wide connection list."""
        c = Connection(fromUnit, toUnit)
        fromUnit.outgoingConnections.append(c)
        toUnit.incomingConnections.append(c)
        self.allConnections.append(c)

    def setWeights(self, newWeights):
        """Manually set every connection weight, in creation order
        (input connections first, bias connection last)."""
        assert len(newWeights) == len(self.allConnections), 'wrong number of weights'
        for (c, w) in zip(self.allConnections, newWeights):
            c.weight = w

    def initialize(self):
        """Reset all connection weights to small random values."""
        for c in self.allConnections:
            c.randomize()
        print('weights randomized')

    def propagate(self, pattern):
        """Clamp the input layer to `pattern` (values in [0, 1]) and return
        the resulting output unit activation (0 or 1)."""
        assert len(pattern) == len(self.inputLayer), 'wrong pattern size'
        for (inputUnit, value) in zip(self.inputLayer, pattern):
            assert 0 <= value <= 1, 'invalid pattern value %g' % value
            inputUnit.activation = value
        self.outputUnit.update()
        return self.outputUnit.activation

    def test(self):
        """Print the current weights and the output for every pattern in
        the current dataset (self.inputs)."""
        print('weights = ' + pretty([c.weight for c in self.allConnections]))
        for pattern in self.inputs:
            output = self.propagate(pattern)
            print('output on %s = %d' % (pattern, output))
        print('')

    def computeError(self):
        """Return (correct, total, score, error) over the current dataset:
        count of correct patterns, pattern count, percent correct, and the
        total sum squared error."""
        error = 0.0
        correct = 0
        for (pattern, target) in zip(self.inputs, self.targets):
            output = self.propagate(pattern)
            error = error + (target - output) ** 2
            if output == target:
                correct = correct + 1
        total = len(self.inputs)
        score = 100.0 * correct / total
        return (correct, total, score, error)

    def teach(self, pattern, target):
        """Apply the perceptron learning rule for one pattern: each weight
        moves by learningRate * (target - output) * source activation."""
        output = self.propagate(pattern)
        self.outputUnit.error = target - output
        for c in self.allConnections:
            increment = self.learningRate * c.toUnit.error * c.fromUnit.activation
            c.weight = c.weight + increment

    def teachDataset(self):
        """Run one training epoch: teach every (pattern, target) pair once,
        in random order."""
        assert len(self.inputs) > 0, 'no training data'
        # list() is required on Python 3, where zip() returns an iterator
        # that random.shuffle cannot handle
        dataset = list(zip(self.inputs, self.targets))
        random.shuffle(dataset)
        for (pattern, target) in dataset:
            self.teach(pattern, target)

    def train(self, cycles=1000):
        """Train for up to `cycles` epochs, printing the error after each,
        and stop early once every pattern is classified correctly."""
        assert len(self.inputs) > 0, 'no training data'
        (correct, total, score, error) = self.computeError()
        print('Epoch #   0: TSS error %7.4f, %d/%d correct (%.1f%%)' %
              (error, correct, total, score))
        for t in range(1, cycles + 1):
            self.teachDataset()
            (correct, total, score, error) = self.computeError()
            print('Epoch #%4d: TSS error %7.4f, %d/%d correct (%.1f%%)' %
                  (t, error, correct, total, score))
            if correct == total:
                print('All patterns learned')
                break

#--------------------------------------------------------------------------------

# Demo setup: build a perceptron with two input units.
n = SimplePerceptron(2)

# All four binary patterns over two inputs.
# NOTE(review): n.targets is never assigned in this chunk, but test(),
# computeError(), and train() read it — presumably it is set later in the
# file or interactively before training; confirm before running.
n.inputs = [[0, 0], [0, 1], [1, 0], [1, 1]]

