#tensorflow #machine-learning #keras #deep-learning #generative-adversarial-network
Question:
I am trying to implement a GAN and I get AssertionError: Could not compute output Tensor("conv2d_16/BiasAdd:0", shape=(None, 64, 64, 3), dtype=float32). I don't know what is causing it and could use some help.
Here is a sample of the code:
from tensorflow import keras

# === Mapping network ===
S = keras.models.Sequential()
S.add(keras.layers.Dense(codings_size, input_shape = [codings_size]))
S.add(keras.layers.LeakyReLU(0.2))
S.add(keras.layers.Dense(codings_size))
S.add(keras.layers.LeakyReLU(0.2))
S.add(keras.layers.Dense(codings_size))
S.add(keras.layers.LeakyReLU(0.2))
S.add(keras.layers.Dense(codings_size))
S.add(keras.layers.LeakyReLU(0.2))
S.compile(loss = 'mse', optimizer = keras.optimizers.Adam(lr=0.0002))
# === Generator ===
#Inputs
inp_style = []
for i in range(5):
    inp_style.append(keras.layers.Input([codings_size]))
inp_noise = keras.layers.Input([64, 64, 1])
#Latent
x = keras.layers.Lambda(lambda x: x[:, :128])(inp_style[0])
#Actual Model
x = keras.layers.Dense(4*4*4*8, activation = 'relu', kernel_initializer = 'he_normal')(x)
x = keras.layers.Reshape([4, 4, 4*8])(x)
x = g_block(x, inp_style[0], inp_noise, 16 * 8, u = False) #4
x = g_block(x, inp_style[1], inp_noise, 8 * 8) #8
x = g_block(x, inp_style[2], inp_noise, 6 * 8) #16
x = g_block(x, inp_style[3], inp_noise, 4 * 8) #32
x = g_block(x, inp_style[4], inp_noise, 3 * 8) #64
x = keras.layers.Conv2D(filters = 3, kernel_size = 1, padding = 'same', kernel_initializer = 'he_normal')(x)
generator = keras.models.Model(inputs = inp_style + [inp_noise], outputs = x)
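# === Discriminator ===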
inp_ = keras.layers.Input(shape=list(faces.image_shape))
d = keras.layers.Conv2D(32, kernel_size = 4, strides = 2, padding = 'same',
                        activation = keras.layers.LeakyReLU(0.2))(inp_)
d = keras.layers.Dropout(0.4)(d)
d = keras.layers.Conv2D(64, kernel_size = 4, strides = 2, padding = 'same',
                        activation = keras.layers.LeakyReLU(0.2))(d)
d = keras.layers.Dropout(0.4)(d)
d = keras.layers.Conv2D(128, kernel_size = 4, strides = 2, padding = 'same',
                        activation = keras.layers.LeakyReLU(0.2))(d)
d = keras.layers.Dropout(0.4)(d)
d = keras.layers.Conv2D(256, kernel_size = 4, strides = 2, padding = 'same',
                        activation = keras.layers.LeakyReLU(0.2))(d)
d = keras.layers.Dropout(0.4)(d)
d = keras.layers.Conv2D(512, kernel_size = 4, strides = 2, padding = 'same',
                        activation = keras.layers.LeakyReLU(0.2))(d)
d = keras.layers.Dropout(0.4)(d)
d = keras.layers.Flatten()(d)
d = keras.layers.Dense(1, activation = 'sigmoid')(d)
discriminator = keras.models.Model(inputs = inp_, outputs = d)
# problem occurs here
gan = keras.models.Sequential([generator, discriminator])
So far, none of the solutions I have found online has helped. The full traceback:
AssertionError Traceback (most recent call last)
<ipython-input-32-fd5633c3585a> in <module>()
----> 1 gan = keras.models.Sequential([generator, discriminator])
7 frames
/usr/local/lib/python3.6/dist-packages/tensorflow/python/training/tracking/base.py in _method_wrapper(self, *args, **kwargs)
455 self._self_setattr_tracking = False # pylint: disable=protected-access
456 try:
--> 457 result = method(self, *args, **kwargs)
458 finally:
459 self._self_setattr_tracking = previous_value # pylint: disable=protected-access
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/sequential.py in __init__(self, layers, name)
140 layers = [layers]
141 for layer in layers:
--> 142 self.add(layer)
143
144 @property
/usr/local/lib/python3.6/dist-packages/tensorflow/python/training/tracking/base.py in _method_wrapper(self, *args, **kwargs)
455 self._self_setattr_tracking = False # pylint: disable=protected-access
456 try:
--> 457 result = method(self, *args, **kwargs)
458 finally:
459 self._self_setattr_tracking = previous_value # pylint: disable=protected-access
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/sequential.py in add(self, layer)
204 # and create the node connecting the current layer
205 # to the input layer we just created.
--> 206 layer(x)
207 set_inputs = True
208
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/base_layer.py in __call__(self, *args, **kwargs)
924 if _in_functional_construction_mode(self, inputs, args, kwargs, input_list):
925 return self._functional_construction_call(inputs, args, kwargs,
--> 926 input_list)
927
928 # Maintains info about the `Layer.call` stack.
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/base_layer.py in _functional_construction_call(self, inputs, args, kwargs, input_list)
1115 try:
1116 with ops.enable_auto_cast_variables(self._compute_dtype_object):
-> 1117 outputs = call_fn(cast_inputs, *args, **kwargs)
1118
1119 except errors.OperatorNotAllowedInGraphError as e:
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/functional.py in call(self, inputs, training, mask)
384 """
385 return self._run_internal_graph(
--> 386 inputs, training=training, mask=mask)
387
388 def compute_output_shape(self, input_shape):
/usr/local/lib/python3.6/dist-packages/tensorflow/python/keras/engine/functional.py in _run_internal_graph(self, inputs, training, mask)
515 for x in self.outputs:
516 x_id = str(id(x))
--> 517 assert x_id in tensor_dict, 'Could not compute output ' + str(x)
518 output_tensors.append(tensor_dict[x_id].pop())
519
AssertionError: Could not compute output Tensor("conv2d_16/BiasAdd:0", shape=(None, 64, 64, 3), dtype=float32)
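What I suspect is going on, stated as an assumption rather than a confirmed fix: the generator above is a functional model with six inputs (five style vectors plus the 64x64 noise map), while keras.models.Sequential assumes each model it wraps takes a single input. When Sequential calls the generator with one tensor, the remaining inputs never receive values, so the final conv2d output cannot be computed, which is exactly the assertion shown. Under that assumption, a minimal sketch of wiring the combined model with the functional API instead of Sequential, reusing codings_size, generator and discriminator from the code above, might look like this:
# Sketch only, assuming the error comes from Sequential's single-input assumption:
# build the combined GAN with the functional API so all six generator inputs exist.
gan_style = [keras.layers.Input([codings_size]) for _ in range(5)]   # five style inputs
gan_noise = keras.layers.Input([64, 64, 1])                          # one noise input

fake_images = generator(gan_style + [gan_noise])   # pass every generator input explicitly
gan_output = discriminator(fake_images)            # discriminator scores the generated batch

discriminator.trainable = False   # usual GAN practice: freeze D inside the combined model
gan = keras.models.Model(inputs = gan_style + [gan_noise], outputs = gan_output)
gan.compile(loss = 'binary_crossentropy',           # typical choice for a sigmoid discriminator
            optimizer = keras.optimizers.Adam(lr = 0.0002))
With this layout the combined gan model takes the same six inputs as the generator, so no part of the graph is left without a tensor when it is run.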