import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn import cross_validation
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print "\n" + "#"*30
print "\nClassifier performance on training dataset\n"
print classification_report(y_train, classifier.predict(X_train), target_names=target_names)
print "#"*30 + "\n"

print "#"*30
print "\nClassification report on test dataset\n"
print classification_report(y_test, y_test_pred, target_names=target_names)
print "#"*30 + "\n"
plt.show()  # Display the input-data figure and the two classifier figures
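
The listing imports a `utilities` module; that helper comes with the book's Python-Machine-Learning-Cookbook repository (Chapter03/utilities.py) and is not part of scikit-learn. If you only want to run the script without cloning the repo, the following is a minimal stand-in sketch, not the book's original code. It assumes data_multivar.txt is comma-separated with the two features first and the class label in the last column, and it shades the decision regions with contourf rather than reproducing the book's exact plotting style:

# utilities.py -- minimal stand-in for the book's helper module (a sketch, not the original)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes rows of the form "x1,x2,label" (comma-separated, label in the last column)
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Evaluate the trained classifier on a dense grid and shade its decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    plt.figure()
    plt.contourf(xx, yy, mesh_output, cmap=plt.cm.gray, alpha=0.3)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black', cmap=plt.cm.Paired)
    plt.title(title)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())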
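
Also note the listing targets Python 2 and an old scikit-learn: the sklearn.cross_validation module was deprecated in 0.18 and removed in 0.20, and the bare print statements are Python 2 syntax. On a current environment the version-sensitive parts look roughly like the sketch below (same 75/25 split, same random_state, same RBF kernel; the relative path to data_multivar.txt and the comma-separated format are assumptions for brevity):

# Python 3 / scikit-learn >= 0.20 equivalents of the version-sensitive lines above
import numpy as np
from sklearn.model_selection import train_test_split  # replaces the removed sklearn.cross_validation
from sklearn.svm import SVC
from sklearn.metrics import classification_report

# Load the same comma-separated data file (features in the leading columns, label last)
data = np.loadtxt('data_multivar.txt', delimiter=',')
X, y = data[:, :-1], data[:, -1]

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

classifier = SVC(kernel='rbf')   # same RBF kernel as in the listing above
classifier.fit(X_train, y_train)
y_test_pred = classifier.predict(X_test)

target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")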