Linear Regression code
In [5]:
%matplotlib inline
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
# Use a large default figure size for the plots
plt.rcParams['figure.figsize'] = (20.0, 10.0)

# Load the head size / brain weight dataset
data = pd.read_csv(r"C:\Users\huzaifa\Downloads\headbrain\headbrain.csv")
print(data.shape)
data.head()
Out[5]:
In [15]:
# Feature (head size) and target (brain weight) as NumPy arrays
X = data['Head Size(cm^3)'].values
Y = data['Brain Weight(grams)'].values
In [17]:
# Mean of X & Y
mean_x = np.mean(X)
mean_y = np.mean(Y)

# Total number of values
m = len(X)

# Using the least-squares formulas to calculate b1 (slope) & b0 (intercept)
numer = 0
denom = 0
for i in range(m):
    numer += (X[i] - mean_x) * (Y[i] - mean_y)
    denom += (X[i] - mean_x) ** 2
b1 = numer / denom
b0 = mean_y - (b1 * mean_x)
print(b1, b0)
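The same estimates can be computed without the explicit loop. A minimal vectorized sketch using NumPy, assuming the X, Y, mean_x, and mean_y defined above (b1_vec and b0_vec are illustrative names, not from the original):
In [ ]:
# Vectorized least-squares estimates; equivalent to the loop above
b1_vec = np.sum((X - mean_x) * (Y - mean_y)) / np.sum((X - mean_x) ** 2)
b0_vec = mean_y - b1_vec * mean_x
print(b1_vec, b0_vec)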
In [18]:
# Plotting the values and the regression line
max_x = np.max(X) + 100
min_x = np.min(X) - 100

# Calculating line x & y values
x = np.linspace(min_x, max_x, 1000)
y = b0 + b1 * x

# Plotting the line and the data points
plt.plot(x, y, color='#58b970', label='Regression Line')
plt.scatter(X, Y, c='#ef5423', label='Scatter plot')
plt.xlabel('Head Size in cm^3')
plt.ylabel('Brain Weight in grams')
plt.legend()
plt.show()
In [21]:
ss_t = 0  # total sum of squares
ss_r = 0  # residual sum of squares
for i in range(m):
    y_pred = b0 + b1 * X[i]
    ss_t += (Y[i] - mean_y) ** 2
    ss_r += (Y[i] - y_pred) ** 2
r2 = 1 - (ss_r / ss_t)
print(r2)
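This score can also be computed in one step with NumPy. A minimal sketch, assuming the b0, b1, X, and Y from the cells above (Y_hat and r2_vec are illustrative names):
In [ ]:
# Vectorized R^2 = 1 - (residual sum of squares / total sum of squares)
Y_hat = b0 + b1 * X
r2_vec = 1 - np.sum((Y - Y_hat) ** 2) / np.sum((Y - np.mean(Y)) ** 2)
print(r2_vec)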
In [25]:
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error

# scikit-learn expects a 2-D feature matrix, so reshape X to (m, 1)
X = X.reshape((m, 1))

# Creating the model
reg = LinearRegression()
# Fitting the training data
reg = reg.fit(X, Y)
# Predicting Y on the training data
Y_pred = reg.predict(X)

# Calculating the R^2 score (note: this name shadows sklearn.metrics.r2_score)
r2_score = reg.score(X, Y)
print(r2_score)
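mean_squared_error is imported above but never used. A short sketch of how it could report the model's root mean squared error (RMSE), assuming the Y and Y_pred from the previous cell:
In [ ]:
# RMSE: square root of the mean of the squared prediction errors
mse = mean_squared_error(Y, Y_pred)
rmse = np.sqrt(mse)
print(rmse)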