import keras
from keras.models import Model
from keras.layers import (Input, Conv2D, Conv2DTranspose, Dense,
                          Reshape, BatchNormalization, concatenate)
inputs = keras.layers.Input(shape=(64, 64, 3), name='main')
label = keras.layers.Input(shape=(2,), name='auxiliary')
#encoding_x = Reshape((64,64,256))(encoding_x)
print('in:',inputs)
encoding_x = Conv2D(16, kernel_size=5, activation='relu',name='c1')(inputs)
print('con1:',encoding_x)
encoding_x = BatchNormalization(momentum=0.8)(encoding_x)
encoding_x = Conv2D(64, kernel_size=5, activation='relu',name='c2')(encoding_x)
print('con2:',encoding_x)
encoding_x = BatchNormalization(momentum=0.8)(encoding_x)
encoding_x = Conv2D(128, kernel_size=5, activation='relu',name='c3')(encoding_x)
encoding_x = BatchNormalization(momentum=0.8)(encoding_x) # 52*52*128
print('con3:',encoding_x)
# decoding layers: condition the decoder on the gender label
print('inlabel:',label)
label = Dense(128*52*52, activation='relu')(label)
print('dense:',label)
label = Reshape((52,52,128))(label)
print('reshape:',label)
encoding_x = concatenate(inputs=[encoding_x, label],axis=3)
print('conne:',encoding_x)
# label = Dense(128 * 52 * 52, activation='relu')(label)
# encoding_x = concatenate(inputs=[encoding_x, label])
# encoding_x = Reshape((52,52,256))(encoding_x)
decoding_x = Conv2DTranspose(64, kernel_size=5, activation='relu')(encoding_x)
print('ct1:',decoding_x)
decoding_x = BatchNormalization(momentum=0.8)(decoding_x)
decoding_x = Conv2DTranspose(16, kernel_size=5, activation='relu')(decoding_x)
print('ct2:',decoding_x)
decoding_x = BatchNormalization(momentum=0.8)(decoding_x)
decoding_x = Conv2DTranspose(3, kernel_size=5, activation='tanh')(decoding_x)
print('ct3:',decoding_x)
decoding_x = Reshape((64, 64, 3))(decoding_x)
decoding_y = decoding_x
decoding_x = concatenate([inputs, decoding_x], axis=3)
model = Model([inputs,label],[decoding_y,decoding_x])
E:\Anaconda3\lib\site-packages\keras-2.2.2-py3.6.egg\keras\engine\network.py:186: UserWarning: Model inputs must come from `keras.layers.Input` (thus holding past layer metadata), they cannot be the output of a previous non-Input layer. Here, a tensor specified as input to your model was not an Input tensor, it was generated by layer reshape_1.
Note that input tensors are instantiated via `tensor = keras.layers.Input(shape)`.
The tensor that caused the issue was: reshape_1/Reshape:0
str(x.name))
Traceback (most recent call last):
File "C:/Users/Administrator/Desktop/gender/1.py", line 49, in <module>
model = Model([inputs,label],[decoding_y,decoding_x])
File "E:\Anaconda3\lib\site-packages\keras-2.2.2-py3.6.egg\keras\legacy\interfaces.py", line 91, in wrapper
return func(*args, **kwargs)
File "E:\Anaconda3\lib\site-packages\keras-2.2.2-py3.6.egg\keras\engine\network.py", line 93, in __init__
self._init_graph_network(*args, **kwargs)
File "E:\Anaconda3\lib\site-packages\keras-2.2.2-py3.6.egg\keras\engine\network.py", line 237, in _init_graph_network
self.inputs, self.outputs)
File "E:\Anaconda3\lib\site-packages\keras-2.2.2-py3.6.egg\keras\engine\network.py", line 1430, in _map_graph_network
str(layers_with_complete_input))
ValueError: Graph disconnected: cannot obtain value for tensor Tensor("auxiliary:0", shape=(?, 2), dtype=float32) at layer "auxiliary". The following previous layers were accessed without issue: ['main', 'c1', 'batch_normalization_1', 'c2']
Why do I get this "ValueError: Graph disconnected" error at the "auxiliary" input, when the layers 'main', 'c1', 'batch_normalization_1' and 'c2' are accessed without issue?
I think the error is that you overwrite the variable label. label should be used to create the input tensor, and afterwards you should never assign anything else to it. Do something like:
label_intermediate = Dense(128*52*52, activation='relu')(label)
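Applied to the code above, the whole model could look roughly like this (untested sketch of the same architecture; x, label_intermediate, label_map, decoded and stacked are just illustrative names):

# sketch only: same layers as above, but the Input tensor `label`
# is never reassigned, so Model([inputs, label], ...) can reach it
from keras.models import Model
from keras.layers import (Input, Conv2D, Conv2DTranspose, Dense,
                          Reshape, BatchNormalization, concatenate)

inputs = Input(shape=(64, 64, 3), name='main')
label = Input(shape=(2,), name='auxiliary')      # keep this reference untouched

x = Conv2D(16, kernel_size=5, activation='relu', name='c1')(inputs)
x = BatchNormalization(momentum=0.8)(x)
x = Conv2D(64, kernel_size=5, activation='relu', name='c2')(x)
x = BatchNormalization(momentum=0.8)(x)
x = Conv2D(128, kernel_size=5, activation='relu', name='c3')(x)
x = BatchNormalization(momentum=0.8)(x)          # shape is now (52, 52, 128)

# store the Dense/Reshape results in new variables instead of overwriting `label`
label_intermediate = Dense(128 * 52 * 52, activation='relu')(label)
label_map = Reshape((52, 52, 128))(label_intermediate)

x = concatenate([x, label_map], axis=3)          # (52, 52, 256)

x = Conv2DTranspose(64, kernel_size=5, activation='relu')(x)
x = BatchNormalization(momentum=0.8)(x)
x = Conv2DTranspose(16, kernel_size=5, activation='relu')(x)
x = BatchNormalization(momentum=0.8)(x)
decoded = Conv2DTranspose(3, kernel_size=5, activation='tanh')(x)   # back to (64, 64, 3)

stacked = concatenate([inputs, decoded], axis=3)

# `label` is still the tensor returned by Input(), so the graph is connected
model = Model([inputs, label], [decoded, stacked])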
Thanks, I think I have solved it.
Hi, I've met the same error. Could you please tell me how to solve it? Thanks!