```python
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
```
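The script imports a local `utilities` module that ships with the book's repository but is not shown in the post. As a rough sketch of what those two helpers might look like (assuming `data_multivar.txt` is comma-separated with the class label in the last column, and a simple mesh-grid plot of the decision boundary), not the book's exact implementation:

```python
# Hypothetical stand-in for the book's utilities.py helpers.
# Assumes a comma-separated data file with the label in the last column.
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    """Load features X and labels y from a comma-separated text file."""
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title=''):
    """Plot a 2-D classifier's decision regions on a mesh grid with the data points overlaid."""
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))

    # Predict a class for every point on the grid and reshape back to the grid shape
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())
    plt.show()
```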