MLAlgo.py
'''
Wrappers for the machine learning algorithms used in this project:
k-nearest neighbours, a support vector machine, and two Keras neural
networks (a dense network for extracted features and a 1-D CNN for raw data).
Each wrapper exposes the same train / score / predict interface.
'''
import numpy as np
import keras
from keras.models import Sequential
from keras.layers import Dense, Dropout
from keras.layers import Conv1D, MaxPooling1D, Flatten
from sklearn.neighbors import KNeighborsClassifier
from sklearn.svm import SVC


class KNN:
    '''k-nearest neighbours classifier (scikit-learn wrapper).'''
    def __init__(self, n_neighbors):
        self.knn = KNeighborsClassifier(n_neighbors=n_neighbors)

    def train(self, X_train, y_train):
        self.knn.fit(X_train, y_train)

    def score(self, X_test, y_test):
        return self.knn.score(X_test, y_test)

    def predict(self, X_test):
        return self.knn.predict(X_test)

class SVM:
    '''Support vector machine classifier with an RBF kernel and C=32.'''
    def __init__(self):
        self.svm = SVC(C=32)

    def train(self, X_train, y_train):
        self.svm.fit(X_train, y_train)

    def score(self, X_test, y_test):
        return self.svm.score(X_test, y_test)

    def predict(self, X_test):
        return self.svm.predict(X_test)

class NeuralNetFeatures:
    '''Fully connected network for classifying pre-extracted feature vectors.'''
    def __init__(self, num_classes):
        self.batch_size = 128
        self.epochs = 20
        self.num_classes = num_classes
        self.model = Sequential()
        self.model.add(Dense(128, activation='relu'))
        self.model.add(Dropout(0.5))
        self.model.add(Dense(32, activation='relu'))
        self.model.add(Dropout(0.5))
        self.model.add(Dense(self.num_classes, activation='softmax'))
        self.model.compile(loss=keras.losses.categorical_crossentropy,
                           optimizer=keras.optimizers.Adadelta(),
                           metrics=['accuracy'])

    def train(self, X_train, y_train):
        y_train = keras.utils.to_categorical(y_train, self.num_classes)
        self.model.fit(X_train, y_train,
                       batch_size=self.batch_size,
                       epochs=self.epochs,
                       verbose=1,
                       validation_split=0.1,
                       shuffle=True)

    def score(self, X_test, y_test):
        # The loss is categorical_crossentropy, so evaluate() needs one-hot labels.
        y_test = keras.utils.to_categorical(y_test, self.num_classes)
        score = self.model.evaluate(X_test, y_test, verbose=0)
        return score

    def predict(self, X_test):
        y_predict = self.model.predict(X_test)
        y_out = np.argmax(y_predict, axis=1)
        return y_out

class NeuralNetRaw:
    '''1-D convolutional network for classifying raw (sequential) input data.'''
    def __init__(self, num_classes):
        self.batch_size = 128
        self.epochs = 20
        self.num_classes = num_classes
        self.model = Sequential()
        self.model.add(Conv1D(filters=64, kernel_size=3, activation='relu'))
        self.model.add(Dropout(0.5))
        self.model.add(MaxPooling1D(pool_size=2))
        self.model.add(Flatten())
        self.model.add(Dense(128, activation='relu'))
        self.model.add(Dense(self.num_classes, activation='softmax'))
        self.model.compile(loss=keras.losses.categorical_crossentropy,
                           optimizer='adam',
                           metrics=['accuracy'])

    def train(self, X_train, y_train):
        # Conv1D expects X_train shaped (samples, timesteps, channels).
        y_train = keras.utils.to_categorical(y_train, self.num_classes)
        self.model.fit(X_train, y_train,
                       batch_size=self.batch_size,
                       epochs=self.epochs,
                       verbose=1,
                       validation_split=0.1,
                       shuffle=True)

    def score(self, X_test, y_test):
        # The loss is categorical_crossentropy, so evaluate() needs one-hot labels.
        y_test = keras.utils.to_categorical(y_test, self.num_classes)
        score = self.model.evaluate(X_test, y_test, verbose=0)
        return score

    def predict(self, X_test):
        y_predict = self.model.predict(X_test)
        y_out = np.argmax(y_predict, axis=1)
        return y_out
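

# Minimal usage sketch (not part of the original module): exercises each wrapper
# on random data to illustrate the shared train / score / predict interface.
# The shapes below (200 samples, 40 features, 64 timesteps, 4 classes) are
# placeholder assumptions for illustration, not values from the actual dataset.
if __name__ == '__main__':
    num_classes = 4
    X_feat = np.random.rand(200, 40)                  # pre-extracted feature vectors
    X_raw = np.random.rand(200, 64, 1)                # raw sequences for the Conv1D model
    y = np.random.randint(0, num_classes, size=200)   # integer class labels

    for name, model, X in [('KNN', KNN(n_neighbors=5), X_feat),
                           ('SVM', SVM(), X_feat),
                           ('NeuralNetFeatures', NeuralNetFeatures(num_classes), X_feat),
                           ('NeuralNetRaw', NeuralNetRaw(num_classes), X_raw)]:
        model.train(X, y)
        # sklearn wrappers return accuracy; Keras wrappers return [loss, accuracy].
        print(name, 'score:', model.score(X, y))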