- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
![Rank: 6](static/image/common/star_level3.gif) ![Rank: 6](static/image/common/star_level2.gif)
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data: a text file of feature rows with a binary class label,
# parsed by the project-local utilities.load_data helper.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# model_selection.train_test_split is the drop-in replacement.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels from the original recipe, kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")