- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D labelled data.

Loads labelled points from a text file via the project's `utilities`
helper, scatter-plots the two classes, fits an SVC (RBF kernel by
default), visualises the decision boundary on both splits, and prints
classification reports for the training and test sets.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (X: feature rows, y: class labels 0/1 per utilities.load_data)
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

# Fixed random_state keeps the split reproducible across runs
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# One human-readable label per distinct class value in y
target_names = ['Class-' + str(int(i)) for i in set(y)]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")