import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn import cross_validation
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print "\n" + "#"*30
print "\nClassifier performance on training dataset\n"
print classification_report(y_train, classifier.predict(X_train), target_names=target_names)
print "#"*30 + "\n"

print "#"*30
print "\nClassification report on test dataset\n"
print classification_report(y_test, y_test_pred, target_names=target_names)
print "#"*30 + "\n"
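Note: the script imports a helper module, utilities, that ships with the book's code (Python-Machine-Learning-Cookbook, Chapter03). If that file is not on your path, a minimal sketch of the two helpers called above might look like the code below; it assumes data_multivar.txt is comma-separated with the class label in the last column and that the features are 2-D, and the function bodies are approximations rather than the book's exact code. Also be aware that the script above uses Python 2 print statements, and that sklearn.cross_validation was removed in scikit-learn 0.20; in newer versions use sklearn.model_selection.train_test_split instead.

# Rough sketch of the two helpers (assumptions: comma-separated data,
# label in the last column, 2-D feature space)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Each row: feature_1,feature_2,label
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title):
    # Predict on a dense grid covering the data to visualize decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())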