- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
![Rank: 6](static/image/common/star_level3.gif) ![Rank: 6](static/image/common/star_level2.gif)
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
# SVM classification example (reconstructed from a garbled forum paste):
# loads a 2-D two-class dataset, visualizes it, trains an RBF-kernel SVM,
# and prints classification reports for the training and test splits.

import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares = class 0, hollow squares = class 1
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black',
            edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None',
            edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Uncomment one of the alternatives to try a different kernel:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train),
                            target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred,
                            target_names=target_names))
print("#" * 30 + "\n")
|
|