- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads labeled points from a text file via the project-local `utilities`
module, plots the two classes, fits an SVC (RBF kernel by default),
visualizes the decision boundary on train and test splits, and prints
classification reports for both.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data.
# NOTE(review): hard-coded absolute Windows path — adjust for your machine.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# `sklearn.cross_validation` was removed in scikit-learn 0.20;
# `model_selection` is the current module.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# classification_report pairs target_names with labels in sorted order,
# so the names must be built from sorted unique labels, not raw set order.
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures (input data and both decision-boundary plots).
plt.show()