import numpy as np
import matplotlib.pyplot as plt
import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# Note: sklearn.cross_validation was removed in scikit-learn 0.20;
# use sklearn.model_selection instead.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Uncomment one of the alternatives to try a different kernel
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures
plt.show()
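The script imports a utilities module that ships with the book's repository but is not included in the post. If you don't have it, the minimal stand-in below should be enough to run the example. It assumes data_multivar.txt is a comma-separated file with the two features followed by the class label on each row, and it plots decision regions over a dense 2-D grid; the grid step of 0.01 and the colour maps are my own choices, not necessarily the book's exact settings.

# utilities.py -- a minimal stand-in for the book's helper module.
# Assumes: comma-separated data, label in the last column, 2-D feature space.
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Each row is assumed to be "x1,x2,label"
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title=''):
    # Evaluate the classifier on a dense grid to visualise its decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

Save it as utilities.py next to the script. The resulting decision-boundary plots are only an approximation of the book's figures, but the classification reports printed at the end depend only on scikit-learn and will match the expected behaviour.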