- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
# SVM classification example (Python Machine Learning Cookbook, Ch. 3).
# Reconstructed from a forum paste: interleaved BBS residue characters were
# stripped, Python 2 print statements converted to print() calls, and the
# removed `sklearn.cross_validation` module replaced by its drop-in
# successor `sklearn.model_selection` (rename since scikit-learn 0.18,
# removal in 0.20).
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data.
# NOTE(review): path is machine-specific; assumes each row of the file is
# two feature columns plus a class label — confirm against utilities.load_data.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

# Fixed random_state keeps the 75/25 split reproducible across runs.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels from the book, kept for experimentation; RBF is active.
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display the accumulated figures; without this a non-interactive run
# creates the plots but never shows them.
plt.show()
|
|