1

Trying to tie the weights of the encoder and decoder layers, but getting this unknown error for the Reshape layer. My tensorflow version is up to date, and I imported the layers from tensorflow.keras.

---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-59-6db796bd6af3> in <module>()
      7                             DenseTranspose(dense_2,activation="selu"),
      8                             DenseTranspose(dense_1,activation="sigmoid"),
----> 9                             keras.layers.Reshape([28,28])
     10 ])
     11 tied_ae = keras.models.Sequential([tied_encoder, tied_decoder])

2 frames
/usr/local/lib/python3.7/dist-packages/keras/engine/sequential.py in add(self, layer)
    176         layer = functional.ModuleWrapper(layer)
    177     else:
--> 178       raise TypeError('The added layer must be an instance of class Layer. '
    179                       f'Received: layer={layer} of type {type(layer)}.')
    180 

TypeError: The added layer must be an instance of class Layer. Received: layer=<__main__.DenseTranspose object at 0x7f207c95a690> of type <class '__main__.DenseTranspose'>.

Code to reproduce the error:

import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers

class DenseTranspose():  # BUG: must inherit from keras.layers.Layer — see the TypeError above
  """Decoder layer intended to reuse (transposed) the kernel of a given Dense layer.

  NOTE(review): because this class does not subclass keras.layers.Layer,
  Sequential rejects its instances ("The added layer must be an instance of
  class Layer"), and the add_weight()/super().build() calls below would also
  fail — they are Layer methods.
  """
  def __init__(self,dense,activation=None,**kwargs):
    # Dense layer whose kernel will be reused, transposed, by call().
    self.dense = dense
    self.act_ = keras.activations.get(activation)
    super().__init__(**kwargs)
  def build(self,batch_input_shape):
    # Only the bias is a new weight; it is sized to the wrapped layer's
    # input dimension (the output dimension once the kernel is transposed).
    self.biases = self.add_weight(name="bias",initializer="zeros",shape=[self.dense.input_shape[-1]])
    super().build(batch_input_shape)
  def call(self,inputs):
    # y = act(x @ W^T + b), where W is the wrapped Dense layer's kernel.
    z = tf.matmul(inputs,self.dense.weights[0],transpose_b=True)
    return self.act_(z + self.biases)
# Dense layers shared between the encoder and the weight-tied decoder.
dense_1 = layers.Dense(100,activation="selu")
dense_2 = layers.Dense(30,activation="selu")
# Encoder: 28x28 image -> flat 784 -> 100 -> 30.
tied_encoder = keras.models.Sequential([
                           layers.Flatten(input_shape=[28,28]),
                           dense_1,
                           dense_2
])
# Decoder mirrors the encoder with transposed kernels: 30 -> 100 -> 784 -> 28x28.
tied_decoder = keras.models.Sequential([
                            DenseTranspose(dense_2,activation="selu"),
                            DenseTranspose(dense_1,activation="sigmoid"),
                            keras.layers.Reshape([28,28])
])
tied_ae = keras.models.Sequential([tied_encoder, tied_decoder])
# NOTE(review): rounded_accuracy, X_train and X_valid are not defined in this
# snippet — presumably defined in earlier notebook cells; verify before running.
tied_ae.compile(loss="binary_crossentropy",optimizer=keras.optimizers.SGD(learning_rate=1.5), metrics=[rounded_accuracy])
history = tied_ae.fit(X_train, X_train, epochs=10,
                      validation_data=(X_valid, X_valid))
1
  • 1
    You forgot that your layer has to inherit from keras.layers.Layer. — Commented Nov 25, 2021 at 15:36

1 Answer 1

0

To complete the answer, as noted by Dr. Snoopy - the layer you created needs to inherit from keras.layers.Layer, so your class should start:

class DenseTranspose(layers.Layer):

Your full code would then be:

import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers

class DenseTranspose(layers.Layer):
  """Dense layer that reuses another Dense layer's kernel, transposed.

  Only a fresh bias vector is created by this layer; the kernel itself is
  shared with (and trained through) the wrapped `dense` layer.
  """

  def __init__(self, dense, activation=None, **kwargs):
    # Wrapped layer whose kernel is reused, transposed, in call().
    self.dense = dense
    self.act_ = keras.activations.get(activation)
    super().__init__(**kwargs)

  def build(self, batch_input_shape):
    # One trainable weight of our own: a bias sized to the wrapped layer's
    # *input* dimension (the output dimension once the kernel is transposed).
    bias_shape = [self.dense.input_shape[-1]]
    self.biases = self.add_weight(
        name="bias",
        initializer="zeros",
        shape=bias_shape,
    )
    super().build(batch_input_shape)

  def call(self, inputs):
    # y = act(x @ W^T + b), where W is the wrapped Dense layer's kernel.
    outputs = tf.matmul(inputs, self.dense.weights[0], transpose_b=True)
    return self.act_(outputs + self.biases)
# Dense layers shared between the encoder and the weight-tied decoder.
dense_1 = layers.Dense(100, activation="selu")
dense_2 = layers.Dense(30, activation="selu")

# Encoder: flatten the 28x28 input, then compress 784 -> 100 -> 30.
tied_encoder = keras.models.Sequential([
    layers.Flatten(input_shape=[28, 28]),
    dense_1,
    dense_2,
])

# Decoder: mirror the encoder through the transposed kernels, then
# reshape the 784-wide output back into a 28x28 image.
tied_decoder = keras.models.Sequential([
    DenseTranspose(dense_2, activation="selu"),
    DenseTranspose(dense_1, activation="sigmoid"),
    keras.layers.Reshape([28, 28]),
])

tied_ae = keras.models.Sequential([tied_encoder, tied_decoder])
# NOTE(review): rounded_accuracy, X_train and X_valid must be defined in
# earlier cells for this to run.
tied_ae.compile(
    loss="binary_crossentropy",
    optimizer=keras.optimizers.SGD(learning_rate=1.5),
    metrics=[rounded_accuracy],
)
history = tied_ae.fit(X_train, X_train, epochs=10,
                      validation_data=(X_valid, X_valid))
Sign up to request clarification or add additional context in comments.

Comments

Start asking to get answers

Find the answer to your question by asking.

Ask question

Explore related questions

See similar questions with these tags.