- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads labelled 2-D points from ``data_multivar.txt`` (via the project-local
``utilities`` helper), plots the two classes, trains an RBF-kernel SVC on a
75/25 train/test split, plots the decision boundaries, and prints
classification reports for both splits.

Reconstructed from a corrupted paste; also updated for current libraries:
``sklearn.cross_validation`` was removed in scikit-learn 0.20 (replaced by
``sklearn.model_selection``) and Python-2 ``print`` statements were converted
to the ``print()`` function.
"""

import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
# NOTE(review): hard-coded absolute Windows path — adjust for your machine.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled black squares, class 1 as hollow squares
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

# Fixed random_state so the split (and report numbers) are reproducible
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation, as in the original recipe:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# One display name per distinct label value (e.g. 'Class-0', 'Class-1')
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")