Commit

fixed bugs and issues in architectures.py
azahed98 committed Sep 16, 2018
1 parent 7ac69ac commit 70ae74a
Showing 1 changed file with 12 additions and 10 deletions.
22 changes: 12 additions & 10 deletions algorithms/architectures.py
@@ -9,7 +9,7 @@
 import tensorflow as tf


-def FeedForward(self, _input, hparams, name="ffn"):
+def FeedForward(_input, hparams, name="ffn"):
     """
     Builds a Feed Forward NN with linear output
@@ -79,7 +79,7 @@ def MakeRNNCell(rnn_layer_sizes,
     return cell


-def DynamicRNN(self, _input, hparams, initial_state=None, name="lstm"):
+def DynamicRNN(_input, hparams, initial_state=None, name="lstm"):
"""
Builds andand executes Dynamic RNN with specified activation
@@ -112,19 +112,21 @@ def DynamicRNN(self, _input, hparams, initial_state=None, name="lstm"):
         hparams['activation'] = tf.tanh

     # Build RNN Cell
-    rnn_cell = MakeRNNCell(hparams['rnn_layer_sizes'],
-                           hparams['dropout_keep_prob'],
-                           hparams['attn_length'],
-                           hparams['base_cell'],
-                           hparams['residual_connections'],
-                           hparams['activation'])
+    with tf.variable_scope(name):
+        rnn_cell = MakeRNNCell(hparams['rnn_layer_sizes'],
+                               hparams['dropout_keep_prob'],
+                               hparams['attn_length'],
+                               hparams['base_cell'],
+                               hparams['residual_connections'],
+                               hparams['activation'])

-    outputs, states = tf.nn.dyanimc_rnn(rnn_cell, _input, initial_state)
+        outputs, states = tf.nn.dynamic_rnn(rnn_cell, _input, initial_state=initial_state,
+                                            dtype=_input.dtype)

     return outputs, states


-def CNN(self, _input, hparams, name="cnn"):
+def CNN(_input, hparams, name="cnn"):
     """
     Builds a Convolutional Neural Network with a flattened output
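Note on the DynamicRNN change: the old call misspelled dynamic_rnn and passed initial_state as the third positional argument, which tf.nn.dynamic_rnn interprets as sequence_length; the new call passes it by keyword and supplies a dtype so the zero initial state can be built when no initial_state is given, and the cell construction is wrapped in tf.variable_scope(name) so each RNN's weights get their own prefix. Below is a minimal standalone sketch of the corrected pattern (TF 1.x); the input shape, scope name, and cell size are illustrative assumptions, not values taken from the repository.

import tensorflow as tf

# Illustrative placeholder: [batch, time, features] with an assumed feature size.
inputs = tf.placeholder(tf.float32, shape=[None, None, 32], name="inputs")

with tf.variable_scope("lstm"):
    cell = tf.nn.rnn_cell.BasicLSTMCell(64)
    # tf.nn.dynamic_rnn's third positional argument is sequence_length, so
    # initial_state must be passed by keyword; with no initial_state, dtype
    # is required so the zero state can be constructed.
    outputs, state = tf.nn.dynamic_rnn(cell, inputs, initial_state=None,
                                       dtype=inputs.dtype)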
