import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures
plt.show()
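If you don't have the book's companion utilities.py on hand, the script above can be run against a minimal stand-in like the sketch below. It only assumes the behavior the script relies on: load_data reads a comma-separated file whose last column is the class label, and plot_classifier draws the decision regions on a mesh grid. The book's actual helper functions may differ.

# utilities.py - minimal stand-in, assumed behavior only (not the book's exact code)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumption: comma-separated values, features in all columns but the last, label in the last
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title=''):
    # Predict over a grid covering the data and shade the decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())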