Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Adição de modelo #65

Open
wants to merge 2 commits into
base: main
Choose a base branch
from
Open
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Next Next commit
initial implementation of NeuralModel
  • Loading branch information
trevisharp committed Oct 13, 2024
commit e3a96bb7f2f0297187616850c531ed9c3c666e47
71 changes: 71 additions & 0 deletions bibmon/_neural_model.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,71 @@
from ._generic_model import GenericModel

from keras.models import Sequential
from keras.layers import LSTM, Dense, Dropout
from keras.optimizers import Adam

###############################################################################

class NeuralModel(GenericModel):
    """
    Model that uses Keras to apply Deep Learning to find anomalies.

    Builds a stacked stateful-LSTM classifier followed by a Dense head
    with a softmax output over the possible process states.

    Parameters
    ----------
    columCount: int
        The number of columns/features this model can handle.
    outputCount: int
        The quantity of possible states for the process
        (normal, anomaly, possible anomaly, etc).
    lstmshapes: list, optional
        The number of units of each stacked LSTM layer, in order.
        Defaults to [128, 64].
    denseshapes: list, optional
        The number of units of each Dense (fully connected) layer
        placed after the LSTM stack. Defaults to [16, 32].
    dropout: float, optional
        The dropout rate applied after each LSTM layer.
    """

    ###########################################################################

    def __init__ (self, columCount, outputCount,
                  lstmshapes=None, denseshapes=None, dropout=0.2):

        # None sentinels instead of mutable list defaults, which would be
        # shared across every instantiation of the class.
        lstmshapes = [128, 64] if lstmshapes is None else list(lstmshapes)
        denseshapes = [16, 32] if denseshapes is None else list(denseshapes)

        self.model = Sequential()

        # First LSTM layer. It must only return full sequences when more
        # LSTM layers follow; otherwise the Dense head would receive a 3D
        # tensor (the original code broke when len(lstmshapes) == 1).
        self.model.add(LSTM(lstmshapes[0], stateful=True,
                            return_sequences=len(lstmshapes) > 1,
                            batch_input_shape=(1, 1, columCount)))
        self.model.add(Dropout(dropout))

        # Intermediate LSTM layers keep returning full sequences.
        for shape in lstmshapes[1:-1]:
            self.model.add(LSTM(shape, stateful=True, return_sequences=True))
            self.model.add(Dropout(dropout))

        # Final LSTM layer collapses the sequence for the Dense head.
        if len(lstmshapes) > 1:
            self.model.add(LSTM(lstmshapes[-1], stateful=True,
                                return_sequences=False))
            self.model.add(Dropout(dropout))

        for shape in denseshapes:
            self.model.add(Dense(shape, activation='relu'))

        # One softmax probability per possible process state.
        self.model.add(Dense(outputCount, activation='softmax'))

        optimizer = Adam(learning_rate=0.001)
        self.model.compile(loss='categorical_crossentropy',
                           optimizer=optimizer, metrics=['accuracy'])

    ###########################################################################

    def train_core (self):
        """Fit the Keras model on the training data prepared by GenericModel."""
        # NOTE(review): the model is stateful with batch_input_shape=(1, 1, n)
        # but fit() is called with batch_size=64 — confirm this is intended,
        # as Keras requires the training batch size to match a stateful
        # model's declared batch size.
        self.model.fit(
            self.X_train.values,
            self.Y_train.values.squeeze(),
            epochs=20,
            batch_size=64,
            validation_split=0.2
        )

    ###########################################################################

    def map_from_X(self, X):
        """Return the model's predicted class probabilities for X."""
        return self.model.predict(X)

    ###########################################################################

    def set_hyperparameters (self, params_dict):
        """Set attributes of the underlying Keras model from a dict.

        Parameters
        ----------
        params_dict: dict
            Mapping of attribute name -> value to set on the model.
        """
        for key, value in params_dict.items():
            # Bug fix: the model is stored in self.model; self.regressor
            # does not exist on this class and raised AttributeError.
            setattr(self.model, key, value)