Let's write the code in a Jupyter Notebook (or similar) and check the behavior ourselves.
When using jupyter-notebook on a Jetson Nano, mglearn is not installed by default, so install it first:

$ sudo pip3 install mglearn
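A quick way to confirm the install worked is to import the package and print where it landed (the path shown will differ by environment):

$ python3 -c "import mglearn; print(mglearn.__file__)"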
import mglearn
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import make_moons

moons = make_moons(n_samples=200, noise=0.1, random_state=0)
# the first array is the data, the second is the target
x = moons[0]
y = moons[1]
plt.figure(figsize=(12, 8))
mglearn.discrete_scatter(x[:, 0], x[:, 1], y)
plt.show()

from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
# check whether a linear SVC really fails to separate this data
from sklearn.svm import LinearSVC

x_train, x_test, y_train, y_test = train_test_split(x, y, stratify=y, random_state=0)
scaler = StandardScaler()
x_train_scaled = scaler.fit_transform(x_train)
# use transform (not fit_transform) so the test set is scaled with the training statistics
x_test_scaled = scaler.transform(x_test)
lin_svm = LinearSVC().fit(x_train_scaled, y_train)

plt.figure(figsize=(12, 8))
mglearn.plots.plot_2d_separator(lin_svm, x)
mglearn.discrete_scatter(x[:, 0], x[:, 1], y)
plt.xlabel("$x_0$", fontsize=20)
plt.ylabel("$x_1$", fontsize=20)
# as expected, a straight line cannot separate the two moons

# map the data into a higher-dimensional feature space and try again
from sklearn.preprocessing import PolynomialFeatures
poly = PolynomialFeatures(degree=3)
x_train_poly = poly.fit_transform(x_train)
x_test_poly = poly.transform(x_test)
print(x_train_poly.shape)
print(x_train_poly)
print(poly.get_feature_names())  # get_feature_names_out() in scikit-learn >= 1.0

# scale the mapped data and train on it
x_train_poly_scaled = scaler.fit_transform(x_train_poly)
x_test_poly_scaled = scaler.transform(x_test_poly)
lin_svm = LinearSVC().fit(x_train_poly_scaled, y_train)

# predict on the test data: True where the prediction matches the label
lin_svm.predict(x_test_poly_scaled) == y_test

# simplify the same steps with a Pipeline
from sklearn.pipeline import Pipeline
poly_svm = Pipeline([
    ('poly', PolynomialFeatures(degree=3)),
    ('scaler', StandardScaler()),
    ('svm', LinearSVC())
])
poly_svm.fit(x, y)

def plot_decision_function(model):
    _x0 = np.linspace(-1.5, 2.5, 100)
    _x1 = np.linspace(-1.0, 1.5, 100)
    x0, x1 = np.meshgrid(_x0, _x1)
    x = np.c_[x0.ravel(), x1.ravel()]
    y_pred = model.predict(x).reshape(x0.shape)
    y_decision = model.decision_function(x).reshape(x0.shape)
    plt.contourf(x0, x1, y_pred, cmap=plt.cm.brg, alpha=0.2)
    plt.contourf(x0, x1, y_decision, levels=[y_decision.min(), 0, y_decision.max()], alpha=0.3)

def plot_dataset(x, y):
    plt.plot(x[:, 0][y == 0], x[:, 1][y == 0], 'bo', ms=15)
    plt.plot(x[:, 0][y == 1], x[:, 1][y == 1], 'r^', ms=15)
    plt.xlabel("$x_1$", fontsize=20)
    plt.ylabel("$x_2$", fontsize=20, rotation=0)

plt.figure(figsize=(12, 8))
plot_decision_function(poly_svm)
plot_dataset(x, y)
plt.show()

---------------------------------------------------

# try the kernel trick instead
from sklearn.svm import SVC
kernel_svm = Pipeline([
    ('scaler', StandardScaler()),
    ('svm', SVC(kernel='poly', degree=3, coef0=1))
])
kernel_svm.fit(x, y)

plt.figure(figsize=(12, 8))
plot_decision_function(kernel_svm)
plot_dataset(x, y)
plt.show()

# effect of increasing the degree
plt.figure(figsize=(20, 15))
for i, degree in enumerate([2, 3, 5, 15]):
    poly_kernel_svm = Pipeline([
        ('scaler', StandardScaler()),
        ('svm', SVC(kernel='poly', degree=degree, coef0=1))
    ])
    poly_kernel_svm.fit(x, y)
    plt.subplot(221 + i)
    plot_decision_function(poly_kernel_svm)
    plot_dataset(x, y)
    plt.title("d = {}".format(degree), fontsize=20)
plt.show()
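The four panels make the effect of degree visible, but it can also help to put a number on it. The sketch below is an addition, not from the original notebook: it scores each polynomial-kernel pipeline on the held-out split created earlier with train_test_split, using only the estimator's standard score() method.

# assumes x_train, x_test, y_train, y_test from the split above are still in scope
for degree in [2, 3, 5, 15]:
    clf = Pipeline([
        ('scaler', StandardScaler()),
        ('svm', SVC(kernel='poly', degree=degree, coef0=1))
    ])
    clf.fit(x_train, y_train)
    # score() reports mean accuracy on the held-out test data
    print("degree = {:2d}: test accuracy = {:.3f}".format(degree, clf.score(x_test, y_test)))

On a dataset this small the exact numbers fluctuate with the random split, but a very high degree tends to buy little over degree 3, which matches what the increasingly wiggly boundaries in the plots suggest.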