#!/usr/bin/env python
# coding: utf-8

# # Logistic Regression from scratch

# In[62]:


'''Implementing logistic regression for a binary classification problem.
   Helpful resources:
   1. Coursera Machine Learning course
   2. https://medium.com/@martinpella/logistic-regression-from-scratch-in-python-124c5636b8ac'''


# In[63]:


# importing all the required libraries
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets

# '%matplotlib inline' is a Jupyter magic; guard it so the file also runs
# as a plain Python script
try:
    get_ipython().run_line_magic('matplotlib', 'inline')
except NameError:
    pass


# In[67]:


# the sigmoid (logistic) function is used as the hypothesis function in
# classification problems
def sigmoid_function(z):
    return 1 / (1 + np.exp(-z))
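
# A quick illustrative check (my addition, not from the original notebook):
# sigmoid_function(0) is exactly 0.5, and the output saturates towards 0
# and 1 for large negative and positive inputs.
# >>> sigmoid_function(np.array([-10.0, 0.0, 10.0]))
# array([4.53978687e-05, 5.00000000e-01, 9.99954602e-01])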


def cost_function(h, y):
    # binary cross-entropy (log-loss) averaged over all training samples
    return (-y * np.log(h) - (1 - y) * np.log(1 - h)).mean()
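
# An illustrative example (values chosen here, not from the original notebook):
# for predicted probabilities h = [0.9, 0.2] and labels y = [1, 0],
# J = (-log(0.9) - log(0.8)) / 2 ≈ (0.1054 + 0.2231) / 2 ≈ 0.1643
# >>> cost_function(np.array([0.9, 0.2]), np.array([1, 0]))
# 0.16425...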

# here alpha is the learning rate, X is the feature matrix and y is the
# target vector
def logistic_reg(alpha, X, y, max_iterations=70000):
    theta = np.zeros(X.shape[1])

    for _ in range(max_iterations):
        z = np.dot(X, theta)
        h = sigmoid_function(z)
        gradient = np.dot(X.T, (h - y)) / y.size
        theta = theta - alpha * gradient  # one batch gradient descent step

    # report the cost reached after the final update
    z = np.dot(X, theta)
    h = sigmoid_function(z)
    J = cost_function(h, y)
    print("Maximum iterations reached!")
    print("Final cost function J =", J)

    return theta
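

# A possible refinement (my addition, not in the original notebook): stop
# early once the cost decreases by less than a small tolerance instead of
# always running for the full max_iterations. The tolerance value here is
# an assumed default, not taken from the source.
def logistic_reg_with_tolerance(alpha, X, y, max_iterations=70000, tol=1e-7):
    theta = np.zeros(X.shape[1])
    prev_cost = np.inf
    for iteration in range(1, max_iterations + 1):
        h = sigmoid_function(np.dot(X, theta))
        gradient = np.dot(X.T, (h - y)) / y.size
        theta = theta - alpha * gradient
        cost = cost_function(sigmoid_function(np.dot(X, theta)), y)
        if prev_cost - cost < tol:  # cost has effectively stopped improving
            print("Converged after", iteration, "iterations, J =", cost)
            break
        prev_cost = cost
    return theta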


# In[68]:


if __name__ == '__main__':
    iris = datasets.load_iris()
    X = iris.data[:, :2]  # use only the first two features (sepal length and width)
    y = (iris.target != 0) * 1  # binary target: 1 for non-setosa, 0 for setosa

    alpha = 0.1
    theta = logistic_reg(alpha, X, y, max_iterations=70000)
    print(theta)
    def predict_prob(X):
        # predicted probability of class 1 under the fitted model
        return sigmoid_function(np.dot(X, theta))
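
    # An illustrative follow-up (my addition, not in the original notebook):
    # threshold the predicted probabilities at 0.5 to get hard class labels
    # and measure accuracy on the training data.
    def predict(X, threshold=0.5):
        return (predict_prob(X) >= threshold).astype(int)

    print("training accuracy:", (predict(X) == y).mean())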

    # plot the two classes and the decision boundary (the p = 0.5 contour)
    plt.figure(figsize=(10, 6))
    plt.scatter(X[y == 0][:, 0], X[y == 0][:, 1], color='b', label='0')
    plt.scatter(X[y == 1][:, 0], X[y == 1][:, 1], color='r', label='1')
    x1_min, x1_max = X[:, 0].min(), X[:, 0].max()
    x2_min, x2_max = X[:, 1].min(), X[:, 1].max()
    xx1, xx2 = np.meshgrid(np.linspace(x1_min, x1_max), np.linspace(x2_min, x2_max))
    grid = np.c_[xx1.ravel(), xx2.ravel()]
    probs = predict_prob(grid).reshape(xx1.shape)
    plt.contour(xx1, xx2, probs, [0.5], linewidths=1, colors='black')
    plt.legend()
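
    # Optional sanity check (my addition, not in the original notebook):
    # compare against scikit-learn's LogisticRegression. fit_intercept=False
    # matches the from-scratch model above, which has no bias term, and a
    # large C weakens sklearn's default L2 regularisation. The coefficients
    # should come out roughly similar, not identical.
    from sklearn.linear_model import LogisticRegression
    clf = LogisticRegression(fit_intercept=False, C=1e6).fit(X, y)
    print("sklearn coefficients:", clf.coef_.ravel())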

    plt.show()