import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities

# Load input data (the Cookbook's Chapter03 multivariate dataset)
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Pick one kernel configuration; the other two are kept here for comparison
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
target_names = ['Class-' + str(int(i)) for i in set(y)]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
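The script imports a utilities module that ships with the book's code repository. If you only copied this listing and don't have that file, a minimal stand-in along the following lines should be enough to run it. This is a sketch, not the book's own helper: it assumes data_multivar.txt is comma-separated with the class label in the last column (the layout used by the Chapter 3 data), and the plotting routine just shades the decision regions on a grid.

# utilities.py -- minimal stand-in (a sketch; the book's repo ships its own version)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes comma-separated rows with the class label in the last column
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Evaluate the classifier on a dense grid to visualise its decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())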
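The commented-out params dictionaries correspond to the linear, polynomial (degree 3), and RBF kernels. Instead of editing the script for each run, one way to compare them is to loop over the three configurations on the same split and print the test-set report for each. The snippet below is a sketch that reuses X_train, X_test, y_train, y_test and target_names from the script above.

# Compare the three kernel configurations mentioned above on the same split
kernel_configs = [
    {'kernel': 'linear'},
    {'kernel': 'poly', 'degree': 3},
    {'kernel': 'rbf'},
]

for params in kernel_configs:
    clf = SVC(**params)
    clf.fit(X_train, y_train)
    print("\nKernel config:", params)
    print(classification_report(y_test, clf.predict(X_test), target_names=target_names))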