Warning: Declaration of action_plugin_tablewidth::register(&$controller) should be compatible with DokuWiki_Action_Plugin::register(Doku_Event_Handler $controller) in /s/bach/b/class/cs545/public_html/fall16/lib/plugins/tablewidth/action.php on line 93
code:perceptron [CS545 fall 2016]

User Tools

Site Tools


code:perceptron

Differences

This shows you the differences between two versions of the page.

Link to this comparison view

Both sides previous revision Previous revision
Next revision
Previous revision
Last revision Both sides next revision
code:perceptron [2013/09/06 09:55]
asa
code:perceptron [2016/08/09 10:25]
127.0.0.1 external edit
Line 7: Line 7:
  
 import numpy as np import numpy as np
-from PyML.classifiers.baseClassifiers import Classifier
+from matplotlib import pyplot as plt
  
-class Perceptron (Classifier) :
+class Perceptron :
  
-    """An implementation of the perceptron algorithm"""
+    """An implementation of the perceptron algorithm
+    Note that this implementation does not include a bias term"""
  
-    attributes = {'max_iterations': 50,
-                  'learning_rate' : 0.2}
+    def __init__(self, max_iterations=100, learning_rate=0.2) :
  
-    def __init__(self, arg = None, **args) :
+        self.max_iterations = max_iterations
+        self.learning_rate = learning_rate

-        Classifier.__init__(self, arg, **args)
+    def fit(self, X, y) :
+        """
+        Train a classifier using the perceptron training algorithm.
+        After training the attribute 'w' will contain the perceptron weight vector.
  
 +        Parameters
 +        ----------
  
-    def train(self, data, **args) :
+        X : ndarray, shape (num_examples, n_features)
+        Training data.

-        Classifier.train(self, data, **args)
+        y : ndarray, shape (n_examples,)
-
+        Array of labels.
-        if data.labels.numClasses != 2 :
+
-            raise ValueError, "not a binary classification problem"
+        """
-
+        self.w = np.zeros(len(X[0]))
-        self.bias = 0
-        data.addFeature('bias', [1. for i in range(len(data))])
-        num_features = data.numFeatures
-        self.w = np.zeros(data.numFeatures)
         converged = False         converged = False
         iterations = 0         iterations = 0
         while (not converged and iterations < self.max_iterations) :         while (not converged and iterations < self.max_iterations) :
             converged = True             converged = True
-            for i in range(len(data)) :
+            for i in range(len(X)) :
-                label = 2 * data.labels.Y[i] - 1 # convert the labels from 0,1 to -1, +1
+                if y[i] * self.discriminant(X[i]) <= 0 :
-                if label * self.decisionFunc(data, i) <= 0 :
+                    self.w = self.w + y[i] * self.learning_rate * X[i]
-                    self.w = self.w + label * self.learning_rate * data.X[i]
                     converged = False                     converged = False
 +                    plot_data(X,​ y, self.w)
             iterations += 1             iterations += 1
-        data.eliminateFeatures([data.numFeatures -1]) 
-        self.bias = self.w[-1] 
-        self.w = self.w[:-1] 
         self.converged = converged         self.converged = converged
         if converged :         if converged :
-            print 'converged in %d iterations '% iterations
+            print 'converged in %d iterations ' % iterations
-        # this should be the last command in the train function
+
-        self.log.trainingTime = self.getTrainingTime()
+    def discriminant(self, x) :
+        return np.dot(self.w, x)
 +             
+    def predict(self, X) :
+        """
+        make predictions using a trained linear classifier
  
 +        Parameters
 +        ----------
  
-    def decisionFunc(self, data, i) :
+        X : ndarray, shape (num_examples, n_features)
+        Training data
+        """
+
+        scores = np.dot(self.w, X)
+        return np.sign(scores)

-        return np.dot(self.w, data.X[i]) + self.bias
+def generate_separable_data(N) :
 +    xA,yA,xB,yB = [np.random.uniform(-1,​ 1) for i in range(4)] 
 +    w = np.random.uniform(-1,​ 1, 2) 
 +    print w,w.shape 
 +    X = np.random.uniform(-1,​ 1, [N, 2]) 
 +    print X,X.shape 
 +    y = np.sign(np.dot(X, w)) 
 +    return X,y,w 
 +     
 +def plot_data(X,​ y, w) : 
 +    fig = plt.figure(figsize=(5,​5)) 
 +    plt.xlim(-1,​1) 
 +    plt.ylim(-1,​1) 
 +    a = -w[0]/w[1] 
 +    pts = np.linspace(-1,1) 
 +    plt.plot(pts, a*pts, '​k-'​) 
+    cols = {1: 'r', -1: 'b'}
+    for i in range(len(X)):
+        plt.plot(X[i][0], X[i][1], cols[y[i]]+'o')
 +    plt.show()
  
-    def classify(self, data, i) :
+if __name__=='__main__' :
 +    X,y,w = generate_separable_data(40) 
 +    p = Perceptron() 
 +    p.fit(X,y)
  
-        score = self.decisionFunc(data,​ i) 
-        classification = 1 if score > 0 else 0 
-        return (classification,​ score) 
 </​file>​ </​file>​
code/perceptron.txt · Last modified: 2016/09/08 11:04 by asa