import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)
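
# Note: the script depends on the book's local utilities module, which is not
# included in this post. A minimal sketch of what load_data likely does,
# assuming data_multivar.txt is a comma-separated file with the features in the
# leading columns and the class label in the last column:
#
# def load_data(input_file):
#     data = []
#     with open(input_file, 'r') as f:
#         for line in f:
#             data.append([float(x) for x in line.strip().split(',')])
#     data = np.array(data)
#     return data[:, :-1], data[:, -1]   # X = features, y = labels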
###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])
# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')
###############################################
# Train/test split and SVM training
# (sklearn.cross_validation was removed in newer scikit-learn releases;
#  train_test_split now lives in sklearn.model_selection)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
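
# utilities.plot_classifier is also from the book's repo and not shown in the
# post. A rough sketch, assuming it just shades the predicted decision regions
# on a mesh grid and overlays the data points:
#
# def plot_classifier(classifier, X, y, title):
#     x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
#     y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
#     xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
#                          np.arange(y_min, y_max, 0.01))
#     # predict a class for every grid point, then reshape back to the grid
#     mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
#     plt.figure()
#     plt.title(title)
#     plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
#     plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
#     plt.xlim(xx.min(), xx.max())
#     plt.ylim(yy.min(), yy.max())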
###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassifier performance on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()  # display the input data, training and test figures
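
# The script trains only one kernel at a time (by commenting/uncommenting the
# params lines). One way to compare all three kernels on the same split, as a
# suggested addition rather than part of the original recipe:
#
# from sklearn.metrics import accuracy_score
#
# for params in [{'kernel': 'linear'},
#                {'kernel': 'poly', 'degree': 3},
#                {'kernel': 'rbf'}]:
#     clf = SVC(**params).fit(X_train, y_train)
#     acc = accuracy_score(y_test, clf.predict(X_test))
#     print(params, '-> test accuracy:', round(acc, 3))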