- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads labelled points with ``utilities.load_data``, visualises the two
classes, fits an ``SVC`` (RBF kernel by default) on a 75/25 train/test
split, and prints a classification report for each split.

NOTE(review): assumes ``utilities`` provides ``load_data`` returning
(X, y) arrays and ``plot_classifier(clf, X, y, title)`` — confirm
against the project module.
"""
import numpy as np
import matplotlib.pyplot as plt
# sklearn.cross_validation was deprecated in 0.18 and removed in 0.20;
# model_selection.train_test_split is the drop-in replacement.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities

# Load input data (feature matrix X and label vector y).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
# params = {'kernel': 'linear'}
# params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance on both splits.
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")