import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# Note: sklearn.cross_validation was removed in newer scikit-learn releases;
# use sklearn.model_selection instead.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display the figures
plt.show()
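The script depends on the book's utilities module for load_data and plot_classifier, which is not included in the post. Below is a minimal sketch of what those helpers presumably do, assuming data_multivar.txt contains comma-separated rows with the class label in the last column; it is an approximation for readers who do not have the Python-Machine-Learning-Cookbook repository, not the book's exact code.

# utilities.py -- sketch of the assumed helpers (approximation, not the repo's code)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumed format: comma-separated rows, last column is the class label
    X, y = [], []
    with open(input_file, 'r') as f:
        for line in f:
            values = [float(v) for v in line.strip().split(',')]
            X.append(values[:-1])
            y.append(values[-1])
    return np.array(X), np.array(y)

def plot_classifier(classifier, X, y, title=''):
    # Evaluate the classifier on a mesh grid to draw its decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())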