
How do I implement a Rosenblatt perceptron?

0 votes
99 views
asked Jun 10, 2016 in Machine Learning by danielcajueiro (5,251 points)

1 Answer

0 votes
answered Jun 10, 2016 by danielcajueiro (5,251 points)

Below is a Python implementation that follows the general lines of Section 4.1.7 of the book Pattern Recognition and Machine Learning by Christopher Bishop.
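
To make clear what the training loop below computes, here is my paraphrase of Bishop's Section 4.1.7: with feature vector $\phi_n = (1, x_{1n}, x_{2n})^T$ and target $t_n \in \{-1,+1\}$, the perceptron minimizes the perceptron criterion over the set $\mathcal{M}$ of misclassified points, applying a stochastic gradient step to each of them:

$$E_P(\mathbf{w}) = -\sum_{n \in \mathcal{M}} \mathbf{w}^T \phi_n t_n, \qquad \mathbf{w}^{(\tau+1)} = \mathbf{w}^{(\tau)} + \eta\, \phi_n t_n.$$

This is what train_weights does: one sweep of updates over the misclassified points, followed by an evaluation of $E_P$ at the end of each epoch. The code: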

import matplotlib.pyplot as plt
import numpy as np

np.random.seed(50)  # fix the seed so the results are reproducible

fig=plt.figure(num=None, figsize=(8, 4), dpi=80, facecolor='w', edgecolor='k')

step_function = lambda x: 0 if x < 0 else 1

def plot_classificator(beta0,beta1,beta2,x1):
    # Decision boundary: beta0 + beta1*x1 + beta2*x2 = 0  =>  x2 = -(beta0 + beta1*x1)/beta2
    minx1=np.min(x1)
    maxx1=np.max(x1)    

    minx2=(beta1*minx1+beta0)/(-beta2)
    maxx2=(beta1*maxx1+beta0)/(-beta2)

    plt.plot([minx1,maxx1],[minx2,maxx2],'k-')  


def data_generation(n,beta0,beta1,beta2):
    x1=np.random.normal(1,0.64,n) # First regressor
    x2=np.random.normal(1,0.36,n) # Second regressor
    z=beta0*np.ones(n)+beta1*x1+beta2*x2 # Latent variable
    y=np.empty([n])
    for i in range(n):
        if(z[i]>0): # threshold at 0 so that, on average, each class gets the same number of points
            y[i]=1
        else:
            y[i]=-1
    return y,x1,x2

class Perceptron():
    def __init__(self,inputData,target):
        self.numberOfInputs=np.shape(inputData)[0]
        self.numberOfWeights=np.shape(inputData)[1]+1 # one extra weight for the bias
        # Prepend a column of ones to the inputs so the bias is the first weight
        self.inputData=np.concatenate((np.ones([self.numberOfInputs,1]),inputData),axis=1)
        self.target=target
        self.weights=np.random.normal(0,1,[self.numberOfWeights])
    def train_weights(self,numberOfEpochs,eta):
        error=np.zeros([numberOfEpochs])
        for epoch in range(numberOfEpochs):
            print("epoch: ",epoch)
            # Stochastic update: correct the weights for each misclassified point
            for i in range(self.numberOfInputs):
                if(np.dot(self.weights,self.inputData[i,:])*self.target[i]<0):
                    self.weights=self.weights+eta*self.inputData[i]*self.target[i]
            # Error evaluation: perceptron criterion summed over the misclassified points
            for i in range(self.numberOfInputs):
                if(np.dot(self.weights,self.inputData[i,:])*self.target[i]<0):
                    error[epoch]=error[epoch]-np.dot(self.weights,self.inputData[i,:])*self.target[i]
            print(error[epoch])
        return error
    def fit(self,x):
        # Prepend the bias input so it matches the weight ordering (bias first)
        x=np.concatenate(([1.0],np.ravel(x)))
        return step_function(np.dot(self.weights,x))


if __name__ == '__main__':
    n=100 # number of observations
    beta0=2.5
    beta1=-1.5
    beta2=-1

    # Case 1

    ax=plt.subplot(121)

    y,x1,x2=data_generation(n,beta0,beta1,beta2)
    x=np.stack((x1,x2),axis=1) # design matrix: one column per regressor
    myPerceptron=Perceptron(x,y)
    error=myPerceptron.train_weights(10,0.01)
    myWeights=myPerceptron.weights
    myBeta0=myWeights[0]
    myBeta1=myWeights[1]    
    myBeta2=myWeights[2]    

    for i in range(n):
        if(y[i]==1):
            plt.plot(x1[i],x2[i],'ro',markersize=5, markeredgewidth=0)
        else:
            plt.plot(x1[i],x2[i],'bo',markersize=5, markeredgewidth=0)
    ax.set_xlabel('$x_1$')
    ax.set_ylabel('$x_2$')          
    ax.set_title('Perceptron')    

    plot_classificator(myBeta0,myBeta1,myBeta2,x1)


    ax=plt.subplot(122)

    plt.plot(range(np.size(error)),error,'r-')
    ax.set_xlabel('Epochs')
    ax.set_ylabel('Error')          
    ax.set_title('Perceptron')    

    fig.savefig("perceptronClassificator.jpg")  
    # Example of a prediction for a new point
    x_new=np.ones([2,1])
    print("The output of the perceptron is: ",myPerceptron.fit(x_new))

This code generates the following figures:

[Figure perceptronClassificator.jpg: left panel, the two classes ($x_1$ vs $x_2$) with the fitted separating line; right panel, the perceptron criterion error per epoch.]
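
If you want to cross-check the result, here is a minimal sketch using scikit-learn's built-in perceptron on the same data. It assumes you run it after the script above, so that x and y are the design matrix and targets built in the __main__ block; the parameter names max_iter, eta0 and tol assume a reasonably recent scikit-learn, and SkPerceptron is just a local alias:

from sklearn.linear_model import Perceptron as SkPerceptron

# Fit scikit-learn's perceptron on the same regressors and targets
# (x and y are the arrays built in the __main__ block above).
sk_model = SkPerceptron(max_iter=1000, eta0=0.01, tol=None)
sk_model.fit(x, y)

# intercept_ plays the role of beta0 and coef_ of (beta1, beta2).
# Only the direction of the separating line is comparable, since the
# perceptron solution for a separable problem is not unique.
print("sklearn intercept:", sk_model.intercept_)
print("sklearn coefficients:", sk_model.coef_)
print("training accuracy:", sk_model.score(x, y))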
