Skip to content

Commit 534b166

Browse files
committed
-
1 parent 097f9db commit 534b166

File tree

6 files changed

+1
-51
lines changed

6 files changed

+1
-51
lines changed

__pycache__/layers.cpython-36.pyc

-1.07 KB
Binary file not shown.

layers.py

+1-49
Original file line numberDiff line numberDiff line change
@@ -41,11 +41,7 @@ def __call__(self, inputs):
4141
with tf.name_scope(self.name):
4242
outputs = self._call(inputs)
4343
return outputs
44-
45-
def log_weights(self):
    """Record a TensorBoard histogram summary for every weight tensor of this layer.

    Each variable in ``self.weights`` is logged under the tag
    ``<layer name>/weights/<variable name>``.
    """
    for var_name, var_tensor in self.weights.items():
        tf.summary.histogram(self.name + '/weights/' + var_name, var_tensor)
44+
4945
class ConvolutionalLayer(Layer):
5046
def __init__(self, input_dim, output_dim, placeholders, dropout,
5147
sparse_inputs, activation, isLast=False, bias=False, featureless=False, **kwargs):
@@ -97,47 +93,3 @@ def _call(self, inputs):
9793
output += self.weights['bias']
9894

9995
return self.activation(output)
100-
101-
class DenseLayer(Layer):
    """Fully-connected layer: ``activation(dot(dropout(x), W) [+ b])``.

    Weights are Glorot-initialized under a variable scope named after the
    layer; the optional bias is zero-initialized.

    Args:
        input_dim: size of the input feature dimension.
        output_dim: size of the output feature dimension.
        dropout: if truthy, the dropout rate is read from
            ``placeholders['dropout']`` (so it can be fed 0 at eval time);
            otherwise dropout is disabled.
        sparse_inputs: whether ``inputs`` is a SparseTensor (forwarded to
            the ``dot`` helper).
        placeholders: dict of feed placeholders; only consulted when
            ``dropout`` is truthy.
        activation: output non-linearity (default ``tf.nn.relu``).
        bias: whether to add a learned bias vector.
        featureless: stored flag — not used in ``_call`` of this layer.
    """

    def __init__(self, input_dim, output_dim, dropout, sparse_inputs,
                 placeholders=None, activation=tf.nn.relu, bias=False, featureless=False, **kwargs):
        super(DenseLayer, self).__init__(**kwargs)

        # Fix: removed the dead `self.dropout = 0.5` assignment that was
        # unconditionally overwritten by the branch below.
        if dropout:
            self.dropout = placeholders['dropout']
        else:
            self.dropout = 0.

        self.activation = activation
        self.sparse_inputs = sparse_inputs
        self.featureless = featureless
        self.bias = bias

        # helper variable for sparse dropout
        #self.num_features_nonzero = placeholders['num_features_nonzero']

        with tf.variable_scope(self.name + '_weights'):
            self.weights['weights'] = glorot([input_dim, output_dim],
                                             name='weights')
            if self.bias:
                self.weights['bias'] = zeros([output_dim], name='bias')

    def _call(self, inputs):
        """Apply dropout, the dense transform, optional bias, then activation."""
        x = inputs

        # Apply dropout. NOTE(review): TF1-style signature — the second
        # argument is keep_prob, hence `1 - self.dropout`.
        #if self.sparse_inputs:
        #    x = sparse_dropout(x, 1-self.dropout, self.num_features_nonzero)
        #else:
        x = tf.nn.dropout(x, 1-self.dropout)

        # The features-by-weights multiplication — the core of the dense layer.
        output = dot(x, self.weights['weights'], sparse=self.sparse_inputs)

        # Optionally add the bias to the output.
        if self.bias:
            output += self.weights['bias']

        # The output passes through the activation function (a ReLU by default).
        return self.activation(output)

tmp/checkpoint

-2
This file was deleted.

tmp/gcn.ckpt.data-00000-of-00001

-90 KB
Binary file not shown.

tmp/gcn.ckpt.index

-220 Bytes
Binary file not shown.

tmp/gcn.ckpt.meta

-74.4 KB
Binary file not shown.

0 commit comments

Comments
 (0)