import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)   # utilities is the helper module from the book's repo (a sketch is given after the listing)
###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')
###############################################
# Train test split and SVM training
# (sklearn's old cross_validation module was renamed to model_selection)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')   # plot_classifier is also from the book's utilities module (sketch after the listing)
y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

# Display all of the figures
plt.show()
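The helper calls above (utilities.load_data and utilities.plot_classifier) come from the utilities.py module that ships with the book's repository and is not included in this post. If you do not have that file, here is a minimal stand-in for load_data, assuming data_multivar.txt is a comma-separated file with the feature columns first and the class label in the last column:

# Hypothetical stand-in for the book's utilities.load_data helper.
# Assumes each line is "x1,x2,...,label" with numeric values.
import numpy as np

def load_data(input_file):
    X, y = [], []
    with open(input_file, 'r') as f:
        for line in f:
            values = [float(v) for v in line.strip().split(',')]
            X.append(values[:-1])   # feature columns
            y.append(values[-1])    # class label in the last column
    return np.array(X), np.array(y)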
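plot_classifier draws the decision regions of a trained classifier over a 2-D mesh and overlays the data points. The exact version lives in the book's utilities.py; a rough equivalent written against the standard NumPy/matplotlib APIs looks like this:

# Hypothetical stand-in for the book's utilities.plot_classifier helper.
import numpy as np
import matplotlib.pyplot as plt

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Mesh covering the data range with a small margin
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))

    # Classify every point on the mesh and reshape back to the grid
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    # Shade the decision regions and overlay the actual samples
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())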
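The two commented-out params lines in the script show the other kernels covered in this chapter. Reusing the X_train/X_test split from the code above, a quick way to compare all three kernels side by side is:

# Compare the three kernel choices from the script on the same split.
# Assumes X_train, X_test, y_train, y_test already exist (from the code above).
from sklearn.svm import SVC
from sklearn.metrics import accuracy_score

for params in [{'kernel': 'linear'},
               {'kernel': 'poly', 'degree': 3},
               {'kernel': 'rbf'}]:
    clf = SVC(**params)
    clf.fit(X_train, y_train)
    acc = accuracy_score(y_test, clf.predict(X_test))
    print(params, '-> test accuracy: %.3f' % acc)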