import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn import cross_validation
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print "\n" + "#"*30
print "\nClassifier performance on training dataset\n"
print classification_report(y_train, classifier.predict(X_train), target_names=target_names)
print "#"*30 + "\n"

print "#"*30
print "\nClassification report on test dataset\n"
print classification_report(y_test, y_test_pred, target_names=target_names)
print "#"*30 + "\n"

# Display the three figures
plt.show()
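The script imports a small helper module, utilities, which ships with the cookbook's code repository and is not included in the post. Below is a minimal sketch of the two functions it relies on, load_data and plot_classifier, assuming the input file is comma-separated with the class label in the last column; the actual utilities.py bundled with the book may differ in detail.

# utilities.py -- hypothetical minimal version of the cookbook's helper module
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    """Load a comma-separated file whose last column is the class label."""
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title):
    """Plot the decision regions of a fitted classifier on a 2-D dataset."""
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))

    # Evaluate the classifier on every mesh point and shade the predicted regions
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.contourf(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Paired,
                edgecolors='black', linewidth=1)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())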
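Note that the snippet above targets Python 2 and an older scikit-learn: the print statements are Python 2 syntax, and sklearn.cross_validation was removed in scikit-learn 0.20 (train_test_split now lives in sklearn.model_selection). The following is a rough sketch of the split, training, and reporting steps under Python 3 with a current scikit-learn, assuming X and y have been loaded as above; scores may differ slightly from the book's because SVC's default gamma changed from 'auto' to 'scale' in 0.22.

# Python 3 / current scikit-learn port of the split, training, and reporting steps
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

classifier = SVC(kernel='rbf')  # default gamma is now 'scale' rather than 'auto'
classifier.fit(X_train, y_train)
y_test_pred = classifier.predict(X_test)

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train),
                            target_names=target_names))
print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")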