# LogisticRegression.py
import numpy as np
import matplotlib.pyplot as plt


def sigmoid(z):
    # Logistic (sigmoid) function: maps any real z to (0, 1)
    return 1 / (1 + np.exp(-z))
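
# Because np.exp is vectorized, sigmoid also works elementwise on arrays.
# A quick sanity check (illustrative only, not part of the original script):
#   >>> sigmoid(0)
#   0.5
#   >>> sigmoid(np.array([-10.0, 0.0, 10.0]))
#   array([4.53978687e-05, 5.00000000e-01, 9.99954602e-01])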

def train(x, y, learningRate, maxIter):
    lenWeights = x.shape[1]  # number of features
    weights = np.random.rand(lenWeights)
    bias = np.random.random()
    t = 1
    converged = False

    # Perceptron-style training: the error is computed from the hard 0/1
    # prediction, and each mistake shifts the decision boundary
    while not converged and t < maxIter:
        targets = []
        for i in range(len(x)):
            # The decision boundary is the line w'x + b = 0; the sigmoid
            # maps w'x + b to a probability estimate in (0, 1)
            z = np.dot(x[i, :], weights) + bias
            logistic = sigmoid(z)

            # Threshold the probability estimate at 0.5 to get a label
            if logistic > 0.5:
                target = 1
            else:
                target = 0

            # Calculate the error and update the weights and bias
            error = y[i] - target
            weights = weights + (x[i, :] * (learningRate * error))
            bias = bias + (learningRate * error)
            targets.append(target)
            t = t + 1  # t counts per-sample updates, not epochs

        if list(y) == list(targets):
            # As soon as every training point is classified correctly, stop
            converged = True

    return weights, bias
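
# Note: train() above uses the perceptron update rule (the error comes from
# the hard 0/1 prediction). Standard logistic regression instead minimizes
# the log-loss, whose gradient uses the continuous sigmoid output. A minimal
# sketch of that batch gradient-descent variant (trainGD is a hypothetical
# helper, not part of the original script):
def trainGD(x, y, learningRate, maxIter):
    weights = np.random.rand(x.shape[1])
    bias = np.random.random()
    for _ in range(maxIter):
        # Gradient of the log-loss: the error uses sigmoid(z), not a 0/1 label
        z = np.dot(x, weights) + bias
        error = y - sigmoid(z)
        weights = weights + learningRate * np.dot(x.T, error)
        bias = bias + learningRate * np.sum(error)
    return weights, bias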

def test(weights, bias, x):
    predictions = []
    probabilities = []
    for i in range(len(x)):
        # Calculate w'x + b and pass it through the sigmoid
        z = np.dot(x[i, :], weights) + bias
        logistic = sigmoid(z)

        # Hard-limit the probability at 0.5 to get the predicted label
        if logistic > 0.5:
            target = 1
        else:
            target = 0

        predictions.append(target)
        probabilities.append(logistic)
    return predictions, probabilities
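
# The per-row loop in test() can be vectorized: np.dot handles the whole data
# matrix at once. testVectorized is a hypothetical alternative, not part of
# the original script; it returns the same results as test().
def testVectorized(weights, bias, x):
    probabilities = sigmoid(np.dot(x, weights) + bias)
    predictions = (probabilities > 0.5).astype(int)
    return list(predictions), list(probabilities)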

if __name__ == '__main__':
    # Simple AND gate test
    # There are infinitely many separating lines for the AND gate, so
    # training converges to a different solution every run.
    # You can replace x and y with your own data.
    x = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
    y = np.array([0, 0, 0, 1])

    weights, bias = train(x, y, 0.02, 1000)
    predictions, probabilities = test(weights, bias, x)

    # Plot the decision boundary. This only works when the number of
    # features is 2; for higher-dimensional data use a contour plot.
    # The boundary w'x + b = 0 crosses the axes at x = -b/w[0] and
    # y = -b/w[1]; connecting those two intercepts draws the line.
    decisionPlot = plt.subplot(1, 1, 1)
    decisionPlot.plot(x[0:-1, 0], x[0:-1, 1], 'ro', markersize=10, label="Class 0")
    decisionPlot.plot(x[-1, 0], x[-1, 1], 'bo', markersize=10, label="Class 1")
    decisionPlot.plot(np.array([0, -bias / weights[0]]),
                      np.array([-bias / weights[1], 0]),
                      '--r', label="Decision Boundary Line")
    decisionPlot.legend(loc='upper right', shadow=True, fancybox=True)
    decisionPlot.set_xlim([-0.5, 1.5])
    decisionPlot.set_ylim([-0.5, 1.5])
    plt.show()

    print("Predicted Labels: " + str(predictions))
    print("Probability of (event|x[i,:]): " + str(probabilities))
    print("Actual Labels: " + str(list(y)))