- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads labelled points from a text file via the project-local `utilities`
module, plots the two classes, fits an SVC (RBF kernel by default), plots
the decision boundary on the train and test splits, and prints
classification reports for both.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities  # project-local helper: load_data / plot_classifier

# Load input data (whitespace-separated features with a trailing label column
# — presumably; see utilities.load_data for the exact format)
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation, as in the original:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
|
|