In [1]:
from sklearn import datasets
iris = datasets.load_iris()
list(iris.keys())
Out[1]:
In [2]:
iris
Out[2]:
In [3]:
print(iris.DESCR)
In [3]:
import matplotlib.pyplot as plt
import numpy as np
In [12]:
lp = iris.data[:, 0:1]   # sepal length (cm)
wp = iris.data[:, 1:2]   # sepal width (cm)
plt.plot(lp, wp, '.k')
Out[12]:
In [13]:
plt.plot(lp, '.k')   # sepal length of each sample, in dataset order
Out[13]:
In [17]:
iris.target
Out[17]:
In [29]:
import numpy as np
import matplotlib.pyplot as plt
t=np.linspace(-100, 100, 1000)
sig = 1/(1 + np.exp(-t))   # standard logistic (sigmoid) function, matching the label below
plt.plot(t,sig, '.b', label=r"$\sigma=\frac{1}{1+e^{-t}}$")
plt.legend(loc='upper left', fontsize=20)
plt.axis([-5, 5, -0.1, 1])
plt.plot([0,0], [0,1], 'r--')
Out[29]:
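The curve above is the logistic (sigmoid) function. Logistic regression computes a weighted sum of the input features, passes it through this function to get an estimated probability, and predicts the positive class when that probability reaches 0.5:
$$\hat{p} = \sigma(\theta^{T}x) = \frac{1}{1+e^{-\theta^{T}x}}, \qquad \hat{y} = \begin{cases} 0 & \text{if } \hat{p} < 0.5 \\ 1 & \text{if } \hat{p} \ge 0.5 \end{cases}$$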
Iris-Setosa Classifier based on sepal width
In [34]:
X = iris.data[:, 1:2]                  # sepal width (cm)
y = (iris.target == 0).astype(int)     # 1 = Iris-Setosa, 0 = any other species
In [37]:
from sklearn.linear_model import LogisticRegression
mylr=LogisticRegression(solver='lbfgs', random_state=42)
mylr.fit(X,y)
Out[37]:
In [41]:
Xnew = np.linspace(2, 5, 70).reshape(-1, 1)   # sepal widths to score
yPred = mylr.predict_proba(Xnew)              # estimated probabilities for each class
plt.plot(Xnew, yPred[:, 0], '.r', label='Not Iris-Setosa')
plt.plot(Xnew, yPred[:, 1], '.b', label='Iris-Setosa')
plt.legend()
plt.plot(X,y,'*g')
Out[41]:
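With a single input feature, the decision boundary is the feature value where the estimated probability crosses 0.5, i.e. where the linear score $\theta_0 + \theta_1 x$ is zero. A minimal sketch, reusing the mylr model fitted above:
In [ ]:
# Sepal width at which the model switches class: theta0 + theta1 * x = 0.
boundary = -mylr.intercept_[0] / mylr.coef_[0][0]
print("Decision boundary (sepal width, cm):", boundary)
# Predictions just below and just above the boundary flip from one class to the other.
print(mylr.predict([[boundary - 0.5], [boundary + 0.5]]))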
In [45]:
# Iris-Setosa classifier based on sepal length
X = iris.data[:, 0:1]                  # sepal length (cm)
y = (iris.target == 0).astype(int)     # 1 = Iris-Setosa
from sklearn.linear_model import LogisticRegression
mylr=LogisticRegression(solver='lbfgs', random_state=42)
mylr.fit(X,y)
Xnew=np.linspace(2,8,70).reshape(-1,1)
yPred=mylr.predict_proba(Xnew)
#plt.plot(Xnew,yPred[:,0],'.r', label='No Iris-Set')
plt.plot(Xnew,yPred[:,1],'.b', label='Iris-Set')
plt.legend()
plt.plot(X,y,'*g')
Out[45]:
In [3]:
import matplotlib.pyplot as plt
pl = iris.data[:, 2:3]   # petal length (cm)
pw = iris.data[:, 3:]    # petal width (cm)
plt.plot(pl, pw, 'sb')
Out[3]:
In [39]:
from sklearn.linear_model import LogisticRegression
X = iris.data[:, 2:]                   # petal length and petal width
y = (iris.target == 2).astype(int)     # 1 = Iris-Virginica
lrmi=LogisticRegression(solver='lbfgs',
C=10,
random_state=42)
lrmi.fit(X,y)
Out[39]:
In [40]:
import numpy as np
x0, x1=np.meshgrid(
np.linspace(1,6.9,500).reshape(-1,1),
np.linspace(0.1,2.5,200).reshape(-1,1))
Xnew=np.c_[x0.ravel(), x1.ravel()]
yProb=lrmi.predict_proba(Xnew)
In [41]:
plt.figure(figsize=(10,4))
plt.plot(X[y==0,0], X[y==0,1],'bs',label='Not Virginica')
plt.plot(X[y==1,0], X[y==1,1],'g^',label='Virginica')
zz=yProb[:,1].reshape(x0.shape)
contour=plt.contour(x0,x1,zz)
plt.clabel(contour, inline=1,fontsize=15)
plt.xlabel("Petal Length")
plt.ylabel("Petal Width")
plt.legend()
Out[41]:
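Since logistic regression is a linear model, the 50% contour above is a straight line; it can also be drawn directly from the fitted parameters by solving $\theta_0 + \theta_1\,\text{length} + \theta_2\,\text{width} = 0$ for the petal width. A minimal sketch, reusing lrmi and the plot ranges from the previous cells:
In [ ]:
# Petal width on the decision boundary at the two ends of the petal-length axis.
left_right = np.array([1, 6.9])
boundary = -(lrmi.coef_[0][0] * left_right + lrmi.intercept_[0]) / lrmi.coef_[0][1]
plt.plot(left_right, boundary, 'k--', linewidth=2)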
Multiclass
In [51]:
X=iris.data[:,2:]
y=iris.target
In [52]:
lrmi2=LogisticRegression(multi_class='multinomial',
solver='lbfgs',
C=10,
random_state=42)
lrmi2.fit(X,y)
x0,x1=np.meshgrid(
np.linspace(0,8,500).reshape(-1,1),
np.linspace(0,3.5,200).reshape(-1,1))
Xnew=np.c_[x0.ravel(), x1.ravel()]
yProba=lrmi2.predict_proba(Xnew)
yPred=lrmi2.predict(Xnew)
from matplotlib.colors import ListedColormap
customc=ListedColormap(['#fafab0', '#9898ff', '#a0faa0'])
zz1=yProba[:,1].reshape(x0.shape)
zz=yPred.reshape(x0.shape)
plt.figure(figsize=(10,4))
plt.plot(X[y==2,0], X[y==2,1],'g^',label='Virg')
plt.plot(X[y==1,0], X[y==1,1],'bs',label='Versic')
plt.plot(X[y==0,0], X[y==0,1],'yo',label='Setos')
contour=plt.contour(x0,x1,zz1,cmap=plt.cm.brg)
plt.clabel(contour,inline=1,fontsize=12)
plt.contourf(x0,x1,zz,cmap=customc)
Out[52]:
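To check what the softmax model does with an individual flower, you can ask for both the predicted class and the per-class probabilities; a minimal sketch using a made-up flower with 5 cm petal length and 2 cm petal width:
In [ ]:
# Predicted class (0 = Setosa, 1 = Versicolor, 2 = Virginica) and class probabilities.
print(lrmi2.predict([[5, 2]]))
print(lrmi2.predict_proba([[5, 2]]).round(3))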
In [50]:
from sklearn.linear_model import LogisticRegression
X=iris.data[:,2:]
y=(iris.target==2).astype(int)
In [45]:
mlr2=LogisticRegression(solver='lbfgs', C=100**10,
random_state=80)
mlr2.fit(X,y)
Out[45]:
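In scikit-learn, C is the inverse of the regularization strength, so a value as large as 100**10 effectively disables regularization and lets the coefficients grow, which makes the probability transition around the boundary very sharp. A minimal sketch to inspect the fitted parameters:
In [ ]:
# With essentially no regularization the coefficients are free to become large.
print("coef:", mlr2.coef_)
print("intercept:", mlr2.intercept_)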
In [46]:
import numpy as np
x0,x1=np.meshgrid(
np.linspace(1,7,500).reshape(-1,1),
np.linspace(0,3,200).reshape(-1,1))
In [47]:
Xnew=np.c_[x0.ravel(), x1.ravel()]
yPred=mlr2.predict_proba(Xnew)
In [48]:
plt.plot(X[y==0, 0], X[y==0,1],'bs')
plt.plot(X[y==1, 0], X[y==1,1],'g^')
zz=yPred[:,1].reshape(x0.shape)
contour=plt.contour(x0,x1,zz)
plt.clabel(contour, inline=1, fontsize=12)
plt.axis([3,7,0.7,3])
Out[48]:
Multiclass classifier
In [65]:
X=iris.data[:,2:]
y=iris.target
mlr3=LogisticRegression(
multi_class='multinomial',
solver='lbfgs',
C=10,
random_state=42)
mlr3.fit(X,y)
Out[65]:
In [66]:
import numpy as np
x0,x1=np.meshgrid(
np.linspace(1,7,500).reshape(-1,1),
np.linspace(0,3,200).reshape(-1,1))
In [67]:
Xnew = np.c_[x0.ravel(), x1.ravel()]
yProba = mlr3.predict_proba(Xnew)      # per-class probabilities
yPred = mlr3.predict(Xnew)             # predicted class labels
zz = yPred.reshape(x0.shape)           # class label at each grid point
zz1 = yProba[:, 1].reshape(x0.shape)   # P(Versicolor) at each grid point
In [75]:
from matplotlib.colors import ListedColormap
plt.plot(X[y==2,0], X[y==2,1],'y*', label='Virg')
plt.plot(X[y==1,0], X[y==1,1],'g^', label='Vers')
plt.plot(X[y==0,0], X[y==0,1],'bs', label='Set')
contour=plt.contour(x0,x1,zz1, cmap=plt.cm.brg)
ccmap=ListedColormap(['#fafab0', '#9898ff', '#a0faa0'])
plt.contourf(x0,x1,zz,cmap=ccmap)
plt.clabel(contour, inline=1,fontsize=12)
plt.xlabel('Petal Length', fontsize=12)
plt.ylabel('Petal Width', fontsize=12)
#plt.axis([0,7,0,3.5])
plt.legend()
Out[75]:
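A quick sanity check on the petal-based multiclass model is its accuracy on the training data itself (an optimistic estimate; a held-out split or cross-validation would give a fairer one). A minimal sketch:
In [ ]:
# Fraction of training samples classified correctly by the petal-based model.
print("Training accuracy:", mlr3.score(X, y))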
Multiclass classifier on sepal features
In [76]:
X=iris.data[:,0:2]
y=iris.target
mlr3=LogisticRegression(
multi_class='multinomial',
solver='lbfgs',
C=10,
random_state=42)
mlr3.fit(X,y)
Out[76]:
In [83]:
import numpy as np
x0,x1=np.meshgrid(
np.linspace(0,9,500).reshape(-1,1),
np.linspace(0,7,200).reshape(-1,1))
In [84]:
Xnew = np.c_[x0.ravel(), x1.ravel()]
yProba = mlr3.predict_proba(Xnew)      # per-class probabilities
yPred = mlr3.predict(Xnew)             # predicted class labels
zz = yPred.reshape(x0.shape)
zz1 = yProba[:, 1].reshape(x0.shape)   # P(Versicolor) at each grid point
In [85]:
from matplotlib.colors import ListedColormap
plt.plot(X[y==2,0], X[y==2,1],'y*', label='Virg')
plt.plot(X[y==1,0], X[y==1,1],'g^', label='Vers')
plt.plot(X[y==0,0], X[y==0,1],'bs', label='Set')
contour=plt.contour(x0,x1,zz1, cmap=plt.cm.brg)
ccmap=ListedColormap(['#fafab0', '#9898ff', '#a0faa0'])
plt.contourf(x0,x1,zz,cmap=ccmap)
plt.clabel(contour, inline=1,fontsize=12)
plt.xlabel('Sepal Length', fontsize=12)
plt.ylabel('Sepal Width', fontsize=12)
#plt.axis([0,7,0,3.5])
plt.legend()
Out[85]:
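The same check on the sepal-based model; because Versicolor and Virginica overlap much more in sepal space than in petal space, the training accuracy is typically noticeably lower here. A minimal sketch:
In [ ]:
# Fraction of training samples classified correctly by the sepal-based model.
print("Training accuracy (sepal features):", mlr3.score(X, y))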