import numpy as np
import matplotlib.pyplot as plt

import utilities  # helper module shipped with the book's code; a minimal stand-in is sketched after this listing

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split  # sklearn.cross_validation was removed in newer scikit-learn
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
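
The listing above relies on the book's utilities helper for load_data and plot_classifier. If that file is not at hand, a minimal stand-in might look like the sketch below; it assumes data_multivar.txt is comma-separated with the class label in the last column, and the decision-boundary plot is just one reasonable implementation, not necessarily the book's.

# utilities.py -- minimal stand-in (a sketch under the assumptions stated above)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumption: comma-separated values, class label in the last column
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title=''):
    # Evaluate the classifier on a dense grid to draw its decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.02),
                         np.arange(y_min, y_max, 0.02))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())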
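
To see how the RBF kernel compares with the two commented-out alternatives, a quick check (a sketch, re-using X_train/X_test/y_train/y_test from the listing above) is to train all three and compare test accuracy:

from sklearn.svm import SVC
from sklearn.metrics import accuracy_score

# Train each candidate kernel on the same split and report test accuracy
for params in [{'kernel': 'linear'},
               {'kernel': 'poly', 'degree': 3},
               {'kernel': 'rbf'}]:
    clf = SVC(**params)
    clf.fit(X_train, y_train)
    print(params, '-> test accuracy:', round(accuracy_score(y_test, clf.predict(X_test)), 3))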