SVM classification example (Python Machine Learning Cookbook, Chapter 3):

import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split  # 'cross_validation' module in older scikit-learn
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
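The two commented-out params lines suggest the author also tried the linear and polynomial kernels. One way to compare all three choices on the same training split is cross-validated accuracy; a sketch using scikit-learn's cross_val_score, where X_train and y_train are the variables defined above and the 5-fold setting is an arbitrary choice:

from sklearn.model_selection import cross_val_score
from sklearn.svm import SVC

# Compare the three kernel configurations from the script on the training data
for params in [{'kernel': 'linear'},
               {'kernel': 'poly', 'degree': 3},
               {'kernel': 'rbf'}]:
    scores = cross_val_score(SVC(**params), X_train, y_train, cv=5, scoring='accuracy')
    print(params, '-> accuracy: {:.3f} +/- {:.3f}'.format(scores.mean(), scores.std()))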