import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# (train_test_split now lives in sklearn.model_selection; the old
#  sklearn.cross_validation module has been removed from scikit-learn.)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures
plt.show()
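
If you don't have the book's utilities.py or data_multivar.txt on disk, the sketch below reproduces the same RBF-kernel SVM workflow in a self-contained way. It is only an illustration of the technique, not the book's code: the synthetic data from sklearn.datasets.make_moons stands in for the data file, and the plot_decision_boundary helper is a hypothetical replacement for utilities.plot_classifier.

import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import make_moons
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

# Synthetic two-class data standing in for data_multivar.txt
X, y = make_moons(n_samples=300, noise=0.25, random_state=5)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Same estimator setup as in the posted code
classifier = SVC(kernel='rbf')
classifier.fit(X_train, y_train)

def plot_decision_boundary(clf, X, y, title):
    # Evaluate the classifier on a grid covering the data and shade the predicted regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.02),
                         np.arange(y_min, y_max, 0.02))
    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.contourf(xx, yy, Z, alpha=0.3)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black')
    plt.title(title)

plot_decision_boundary(classifier, X_train, y_train, 'Training dataset')
plot_decision_boundary(classifier, X_test, y_test, 'Test dataset')

print(classification_report(y_test, classifier.predict(X_test),
                            target_names=['Class-0', 'Class-1']))
plt.show()

Swapping kernel='rbf' for 'linear' or 'poly' (with a degree argument) reproduces the kernel comparison suggested by the commented-out params lines above.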