import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# (sklearn.cross_validation has been removed from scikit-learn; use model_selection instead)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Select the SVM kernel: uncomment exactly one of the parameter sets below
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

/ z" c7 e: `3 M$ r3 |###############################################
" p D& [0 i& ^, C2 C+ G# Evaluate classifier performance5 L8 t7 F) v' u$ s2 |+ O
& u& a6 f7 H! q2 y7 m. W- y
from sklearn.metrics import classification_report" \: |0 E* K* n
/ f5 @& m7 ?9 L0 W3 c& P2 e% V9 H7 ] X
target_names = ['Class-' + str(int(i)) for i in set(y)]% U9 X+ F" y7 C
print "\n" + "#"*30
% [3 p3 Y7 N) Y: Gprint "\nClassifier performance on training dataset\n" U; d2 U" P) t" U8 j
print classification_report(y_train, classifier.predict(X_train), target_names=target_names)2 m1 x; Y% M. |
print "#"*30 + "\n"& E) @; {+ q" F( O* C! {8 V9 ]
/ |% M: Z, ]; h2 s4 y( U; J
print "#"*30& _2 ?! G$ w9 \$ r ~. Z
print "\nClassification report on test dataset\n"* {" `3 e/ A. U: E9 [' `
print classification_report(y_test, y_test_pred, target_names=target_names)
/ u9 l: y* k( ?/ o( y9 Fprint "#"*30 + "\n"& S+ L3 d3 i7 [" y) C' h
& M& q/ E9 X/ {" ]& K \