|
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# (sklearn.cross_validation has been removed; use sklearn.model_selection instead)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Pick one kernel: linear, polynomial (degree 3), or RBF
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures
plt.show()
|
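
To compare kernels, uncomment one of the `params` lines: 'linear' gives a straight decision boundary, 'poly' with degree 3 a cubic one, and 'rbf' (used here) a Gaussian radial-basis boundary that wraps around each class.

The script also depends on a local `utilities` module that the post does not include. Below is a minimal sketch of what `load_data` and `plot_classifier` might look like, assuming the Cookbook's conventions (a comma-separated data file with the label in the last column, and a mesh-grid plot of the decision regions); the function bodies here are illustrative, so adapt them to the actual `utilities.py` that ships with the book's code.

|
# Hypothetical sketch of the 'utilities' helper module used above.
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumed file format: feature1,feature2,...,label on each line
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Build a mesh grid covering the data range with a small margin
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))

    # Predict the class for every point on the grid and reshape to the grid
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    # Shade the decision regions and overlay the data points
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())
|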