Logistic Regression codes

Logistic Regression
In [5]:
%matplotlib inline
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
plt.rcParams['figure.figsize']=(20.0,10.0)


data= pd.read_csv(r"C:\Users\huzaifa\Downloads\headbrain\headbrain.csv")
print(data.shape)
data.head()
(237, 4)
Out[5]:
Gender Age Range Head Size(cm^3) Brain Weight(grams)
0 1 1 4512 1530
1 1 1 3738 1297
2 1 1 4261 1335
3 1 1 3777 1282
4 1 1 4177 1590
In [15]:
X=data['Head Size(cm^3)'].values
Y=data['Brain Weight(grams)'].values
In [17]:
# Mean X & Y
mean_x = np.mean(x)
mean_y = np.mean(y)
# total number of values
m=len(X)

# using the formula to calculate b1 & b0
numer=0
denom=0
for i in range(m):
    numer += (X[i]-mean_x) * (Y[i] - mean_y)
    denom += (X[i]-mean_x) **2
b1 = numer/denom
b0 = mean_y - (b1 * mean_x)
print(b1,b0)
0.2634293394893993 325.5734210494426
In [18]:
# plotting values and regression Line
max_x = np.max(X)+100
min_x = np.min(X)-100

# calculating line x & y 
x=np.linspace(min_x,max_x,1000)
y= b0 + b1 * x

# plotting line
plt.plot(x,y,color='#58b970', label='Regression Line')

plt.scatter(X,Y, c='#ef5423', label='Scatter plot')

plt.xlabel('Head size in cm3')
plt.ylabel('brain weight in grams')
plt.legend()
plt.show()
In [21]:
ss_t = 0 #total sum of squares
ss_r = 0   #total sum of sqaures of residuals
for i in range(m):
    y_pred =  b0 + b1 * X[i]
    ss_t += (Y[i] - mean_y) **2
    ss_r += (Y[i] - y_pred) **2
r2= 1-(ss_r/ss_t)
print(r2)
0.6557167531127441
In [25]:
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error


X=X.reshape((m, 1))

# Creating ModeL
reg = LinearRegression()
# fitting training data
reg = reg.fit(X, Y)
# Y prediction
Y_pred=reg.predict(X)

# calculating R2 Score

r2_score=reg.score(X, Y)

print(r2_score)
0.639311719957
In [ ]:
 

Comments

Popular posts from this blog

Interview Preparation Kit

Dinosaurus_Island_Character_level_language_model

How to crack the interviews and get a decent job?