- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads labelled 2-D points from a text file, plots the two classes,
fits an SVC (RBF kernel by default), visualizes the decision boundary
on the train and test splits, and prints classification reports.

Relies on a project-local `utilities` module providing:
  - load_data(path) -> (X, y)
  - plot_classifier(clf, X, y, title)
"""

import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
# NOTE(review): hard-coded absolute Windows path — adjust for your machine.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# sklearn.cross_validation was removed in scikit-learn 0.20;
# model_selection.train_test_split is the drop-in replacement.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures created above.
plt.show()