- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on a 2-D two-class dataset.

Loads the data via the project-local ``utilities`` module, plots the two
classes, fits an RBF-kernel SVC on a 75/25 train/test split, plots the
decision boundary for both splits, and prints classification reports.

Based on Python Machine Learning Cookbook, Chapter 3 (updated from
Python 2 / sklearn<0.20 syntax).
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities  # project-local helpers: load_data, plot_classifier

# Load input data (two feature columns X, integer labels y)
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares = class 0, hollow squares = class 1
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels to experiment with:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# One display name per distinct label, e.g. 'Class-0', 'Class-1'
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")