Warning: Declaration of action_plugin_tablewidth::register(&$controller) should be compatible with DokuWiki_Action_Plugin::register(Doku_Event_Handler $controller) in /s/bach/b/class/cs545/public_html/fall16/lib/plugins/tablewidth/action.php on line 93
code:perceptron [CS545 fall 2016]

User Tools

Site Tools


code:perceptron

Differences

This shows you the differences between two versions of the page.

Link to this comparison view

Both sides previous revision Previous revision
Next revision
Previous revision
Next revision Both sides next revision
code:perceptron [2013/09/06 09:55]
asa
code:perceptron [2015/08/28 10:36]
asa
Line 5: Line 5:
  
 <file python perceptron.py> <file python perceptron.py>
 +
  
 import numpy as np import numpy as np
-from PyML.classifiers.baseClassifiers import Classifier
+from matplotlib import pyplot as plt
  
-class Perceptron (Classifier) :
+class Perceptron :
  
-    """​An implementation of the perceptron algorithm"""​+    """​An implementation of the perceptron algorithm
 +    Note that this implementation does not include a bias term"""​
  
-    attributes = {'max_iterations': 50,
-                  'learning_rate' : 0.2}
+    def __init__(self, max_iterations=100, learning_rate=0.2) :
  
-    def __init__(self, arg = None, **args) :
+        self.max_iterations = max_iterations
+        self.learning_rate = learning_rate
  
-        Classifier.__init__(self, arg, **args)
+    def fit(self, X, y) :
  
+        self.w = np.zeros(len(X[0]))
-
-    def train(self, data, **args) :
-
-        Classifier.train(self, data, **args)
-
-        if data.labels.numClasses != 2 :
-            raise ValueError, "not a binary classification problem"
-
-        self.bias = 0
-        data.addFeature('bias', [1. for i in range(len(data))])
-        num_features = data.numFeatures
-        self.w = np.zeros(data.numFeatures)
         converged = False         converged = False
         iterations = 0         iterations = 0
         while (not converged and iterations < self.max_iterations) :         while (not converged and iterations < self.max_iterations) :
             converged = True             converged = True
-            for i in range(len(data)) :
-                label = 2 * data.labels.Y[i] - 1 # convert the labels from 0,1 to -1,+1
-                if label * self.decisionFunc(data, i) <= 0 :
-                    self.w = self.w + label * self.learning_rate * data.X[i]
+            for i in range(len(X)) :
+                if y[i] * self.discriminant(X[i]) <= 0 :
+                    self.w = self.w + y[i] * self.learning_rate * X[i]
                     converged = False                     converged = False
             iterations += 1             iterations += 1
-        data.eliminateFeatures([data.numFeatures - 1])
-        self.bias = self.w[-1]
-        self.w = self.w[:-1]
+            plot_data(X, y, self.w)
         self.converged = converged         self.converged = converged
         if converged :         if converged :
-            print 'converged in %d iterations '% iterations
-        # this should be the last command in the train function
-        self.log.trainingTime = self.getTrainingTime()
+            print 'converged in %d iterations ' % iterations
+
+    def discriminant(self, x) :
+        return np.dot(self.w, x)
+
+    def predict(self, X) :
+
+        scores = np.dot(self.w, X)
+        return np.sign(scores)
  
-    def decisionFunc(self, data, i) :
-
-        return np.dot(self.w, data.X[i]) + self.bias
+def generate_separable_data(N) :
+    xA,yA,xB,yB = [np.random.uniform(-1, 1) for i in range(4)]
+    w = np.random.uniform(-1, 1, 2)
+    print w,w.shape
+    X = np.random.uniform(-1, 1, [N, 2])
+    print X,X.shape
+    y = np.sign(np.dot(X, w))
+    return X,y,w
 +     
+def plot_data(X, y, w) :
+    fig = plt.figure(figsize=(5,5))
+    plt.xlim(-1,1)
+    plt.ylim(-1,1)
+    a = -w[0]/w[1]
+    pts = np.linspace(-1,1)
+    plt.plot(pts, a*pts, 'k-')
+    cols = {1: 'r', -1: 'b'}
+    for i in range(len(X)):
+        plt.plot(X[i][0], X[i][1], cols[y[i]]+'o')
+    plt.show()
  
-    def classify(self, data, i) :
+if __name__=='__main__' :
+    X,y,w = generate_separable_data(20)
+    p = Perceptron()
+    p.fit(X,y)
  
-        score = self.decisionFunc(data, i)
-        classification = 1 if score > 0 else 0
-        return (classification, score)
 </file> </file>
code/perceptron.txt · Last modified: 2016/09/08 11:04 by asa