import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (each row: two features plus a class label)
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# (sklearn.cross_validation has been removed in newer scikit-learn releases;
#  train_test_split now lives in sklearn.model_selection)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Uncomment one of the alternative lines below to try a different kernel
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures
plt.show()
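The script depends on the book's local utilities module, which is not included in this post. A minimal stand-in, written here as an assumption inferred from how load_data and plot_classifier are called above (not the book's actual code), could look like this:

# utilities.py -- hypothetical minimal replacement for the book's helper module
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumed file format: comma-separated rows, last column is the class label
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title=''):
    # Evaluate the classifier on a dense mesh covering the data range
    # and draw the resulting decision regions with the points on top
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

With a file like this saved as utilities.py next to the script, the example runs end to end and shows the decision boundary of the chosen kernel on both the training and test splits.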