import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)
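# Note: utilities.py ships with the book's code repository and is not included
# in this post. If it is missing, a rough stand-in for load_data could look like
# the commented sketch below (an assumption about the file format: comma-separated
# values with the class label in the last column):
#
#   def load_data(input_file):
#       data = np.loadtxt(input_file, delimiter=',')
#       return data[:, :-1], data[:, -1]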
###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')
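# Note: utilities.plot_classifier also comes from the book's repository. It
# presumably draws the decision boundary over a mesh grid; a rough sketch (the
# step size and styling below are assumptions, not the book's exact code) would be:
#
#   def plot_classifier(classifier, X, y, title):
#       x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
#       y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
#       xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
#                            np.arange(y_min, y_max, 0.01))
#       Z = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
#       plt.figure()
#       plt.contourf(xx, yy, Z, alpha=0.3)
#       plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black')
#       plt.title(title)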
###############################################
# Train test split and SVM training
# Note: sklearn.cross_validation (used in the book) was removed in scikit-learn
# 0.20; model_selection provides the same train_test_split.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
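# Not in the original snippet: a quick overall-accuracy sanity check.
# SVC.score returns the mean accuracy on the given test data and labels.
print("Test accuracy: {:.3f}".format(classifier.score(X_test, y_test)))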
###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")
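# Added (not in the original post): without a final plt.show() the matplotlib
# figures created above never appear when the script is run non-interactively.
plt.show()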