import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Uncomment one of the alternatives below to try a different kernel
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures
plt.show()
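
The script imports `utilities`, the helper module that ships next to the chapter scripts in the book's repository. If you don't have that file on your path, the following is a minimal, hypothetical stand-in with the same interface; it assumes data_multivar.txt is comma-separated with the class label in the last column, and it is only a sketch, not the book's actual implementation.

# utilities.py -- hypothetical stand-in for the book's helper module
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Read the file and split it into features X and labels y
    # (assumes comma-separated rows with the label in the last column)
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title=''):
    # Evaluate the fitted classifier on a dense 2-D grid and shade the regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())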
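
To compare the three kernels listed in the commented-out `params` lines, a quick loop over them on the same split can be appended to the script. The loop below is a convenience sketch, not part of the book's code; it reuses X_train, X_test, y_train, y_test from the split above.

from sklearn.metrics import accuracy_score
from sklearn.svm import SVC

# Fit one SVM per kernel option and report test-set accuracy
for params in [{'kernel': 'linear'},
               {'kernel': 'poly', 'degree': 3},
               {'kernel': 'rbf'}]:
    clf = SVC(**params).fit(X_train, y_train)
    acc = accuracy_score(y_test, clf.predict(X_test))
    print(params, '-> test accuracy:', round(acc, 3))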