- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 558
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
"""Train SVM classifiers on 2-D multivariate data and report performance.

Loads a two-class dataset, visualizes it, fits an SVC (kernel selectable
via the `params` dict), plots decision boundaries on the train/test splits,
and prints classification reports for both.
"""

import numpy as np
import matplotlib.pyplot as plt
# NOTE(review): cross_validation was removed in scikit-learn 0.20;
# model_selection is the supported replacement.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities  # project-local helpers: load_data, plot_classifier

# Load input data (two feature columns + integer class label per row —
# assumed from the 2-D scatter/indexing below; confirm against utilities.load_data)
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Kernel choice: uncomment exactly one of the param sets below.
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
target_names = ['Class-' + str(int(i)) for i in set(y)]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures (scatter plot + the two classifier-boundary plots)
plt.show()
|