Hi all, I am trying to write a seq2seq model in Keras that takes a 2D array as input.
Code for the model:
encoder_inputs = Input(shape=(num, len))
decoder_inputs = Input(shape=(num, len))
encoder_cell = LSTM(200, return_state=True)
encoder_outputs, state_h, state_c = encoder_cell(encoder_inputs)
encoder_states = [state_h, state_c]
encoder_cell = LSTM(200, return_sequences=True)(encoder_inputs)
decoder_cell = RepeatVector(len)(encoder_cell[0])
decoder_cell = LSTM(200, return_sequences=True)(decoder_cell, initial_state=encoder_states)
decoder_dense = Dense(num, activation='softmax')
logits = decoder_dense(decoder_cell[0])
return Model([encoder_inputs, decoder_inputs], logits)
But I keep getting this error:
node = layer._inbound_nodes[node_index]
AttributeError: 'NoneType' object has no attribute '_inbound_nodes'
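A minimal sketch of one possible fix, assuming `num` is the feature dimension and `length` the sequence length (renamed from `len` to avoid shadowing the builtin; the unused decoder_inputs is dropped). The likely culprit above is indexing Keras tensors directly (encoder_cell[0], decoder_cell[0]): that produces raw backend ops with no layer history, which is exactly what the '_inbound_nodes' error complains about.
from keras.layers import Input, LSTM, RepeatVector, Dense
from keras.models import Model

num, length = 50, 20  # hypothetical sizes for illustration only

encoder_inputs = Input(shape=(num, length))
encoder_outputs, state_h, state_c = LSTM(200, return_state=True)(encoder_inputs)
encoder_states = [state_h, state_c]
decoded = RepeatVector(length)(encoder_outputs)  # no [0] indexing needed
decoded = LSTM(200, return_sequences=True)(decoded, initial_state=encoder_states)
logits = Dense(num, activation='softmax')(decoded)  # Dense applies per timestep
model = Model(encoder_inputs, logits)
model.summary()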
I'm having a similar problem; did you find a solution?
I had a similar problem where I was slicing an input tensor, performing some operations on it, and then concatenating it back. I solved the issue by making this procedure a lambda function and converting it to a Keras layer using keras.layers.Lambda.
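A minimal sketch of that approach with made-up shapes, in case it helps others: do the slice, the operation, and the concatenation all inside one function and wrap it in keras.layers.Lambda, so the result carries layer history.
import keras.backend as K
from keras.layers import Input, Lambda, Dense
from keras.models import Model

def slice_and_merge(x):
    head = x[:, :10] * 2.0                             # slice + some operation
    return K.concatenate([head, x[:, 10:]], axis=-1)   # put it back together

inputs = Input(shape=(32,))
merged = Lambda(slice_and_merge)(inputs)  # traceable Keras tensor
outputs = Dense(1)(merged)
model = Model(inputs, outputs)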
I was having a similar problem because I used K.reshape; changing it to the Reshape layer fixed it.
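For reference, the difference in a toy example (shapes made up): K.reshape returns a bare backend tensor with no layer attached, which breaks graph tracing, while the Reshape layer keeps the graph intact.
import keras.backend as K
from keras.layers import Input, Reshape

x = Input(shape=(4, 6))
# bad = K.reshape(x, (-1, 24))   # bare backend op -> '_inbound_nodes' error at Model build
good = Reshape((24,))(x)         # layer call -> usable in Model(x, good)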
I"m having a similar problem - I want to add a ones tensor:
ones_tensor = Lambda(lambda x: K.ones_like(x))(some_previous_layer)
ones_tensor = ones_tensor[:, 0]
ones_tensor = Lambda(lambda x: K.expand_dims(x, axis=1))(ones_tensor)
@danFromTelAviv didn't work for me
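One possible reason it still failed: the middle line, ones_tensor[:, 0], is a raw tensor slice outside any layer, so the graph is broken even though the other two steps use Lambda. A sketch that does all three steps inside a single Lambda (reusing some_previous_layer from the snippet above):
import keras.backend as K
from keras.layers import Lambda

ones_tensor = Lambda(
    lambda x: K.expand_dims(K.ones_like(x)[:, 0], axis=1)  # ones -> slice -> expand, all in one layer
)(some_previous_layer)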
import keras.backend as K
from keras.layers import (Input, Conv3D, Conv2D, Reshape, Lambda, LSTM,
                          Dense, Dropout)
from keras.models import Model

S = 7
L = 25
## input layer
input_layer = Input((S, S, L, 1))
output_units = 13
## convolutional layers
conv_layer1 = Conv3D(filters=8, kernel_size=(3, 3, 1), activation='relu')(input_layer)
print(conv_layer1._keras_shape)
conv_layer2 = Conv3D(filters=16, kernel_size=(3, 3, 1), activation='relu')(conv_layer1)
print(conv_layer2._keras_shape)
conv_layer3 = Conv3D(filters=32, kernel_size=(3, 3, 1), activation='relu')(conv_layer2)
print(conv_layer3._keras_shape)
conv3d_shape = conv_layer3._keras_shape
conv_layer3 = Reshape((conv3d_shape[1], conv3d_shape[2], conv3d_shape[3]*conv3d_shape[4]))(conv_layer3)
print(conv_layer3._keras_shape)
conv_layer4 = Conv2D(filters=64, kernel_size=(1,1), activation='relu')(conv_layer3)
print(conv_layer4._keras_shape)
conv_layer4_squeezed = Lambda(lambda x: K.squeeze(x, 1))(conv_layer4)
print(K.is_keras_tensor(conv_layer4_squeezed))
lstm_out = LSTM(128)(conv_layer4_squeezed)
print(lstm_out._keras_shape)
## fully connected layers
dense_layer1 = Dense(units=256, activation='relu')(lstm_out)  # was `flatten_layer`, which is never defined; the dense head should consume lstm_out
dense_layer2 = Dense(units=128, activation='relu')(dense_layer1)
dense_layer2 = Dropout(0.2)(dense_layer2)
output_layer = Dense(units=output_units, activation='softmax')(dense_layer2)
print(K.is_keras_tensor(output_layer))
model = Model(inputs=input_layer, outputs=output_layer)
model.summary()
AttributeError: 'NoneType' object has no attribute 'inbound_nodes'
And yes, I checked whether all these tensors are Keras tensors, and they are.
TF-1.13
Keras-2.0
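With the dense head wired to lstm_out (see the fix above), the model should build, since every other step here already goes through a layer or a Lambda. A quick shape sanity check using the S=7, L=25 sizes above:
import numpy as np

x = np.random.rand(2, 7, 7, 25, 1).astype('float32')  # batch of 2
print(model.predict(x).shape)                          # expected: (2, 13)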
Hi, I am also partitioning feature maps at different scales and then concatenating them all, but I am facing the error below. Can you please suggest how to resolve it?
node = layer._inbound_nodes[node_index]
AttributeError: 'NoneType' object has no attribute '_inbound_nodes'
Here is my code:
from keras.applications.resnet50 import ResNet50
from keras.initializers import RandomNormal
from keras.layers import (Conv2D, BatchNormalization, MaxPooling2D, Activation,
                          Dropout, Reshape, Dense, concatenate)
from keras.models import Model

model_resnet = ResNet50(include_top=False, weights="imagenet", input_shape=(386, 124, 3))
conv1 = Conv2D(128, (2,2), activation = 'relu', strides = (3,3))(model_resnet.output)
conv1 = BatchNormalization()(conv1)
pool1 = MaxPooling2D((2,2), padding='same')(conv1)
relu1 = Activation('relu')(pool1)
drop1 = Dropout(rate = 0.5)(relu1)
flattened1 = Reshape((256,))(drop1)
conv2 = Conv2D(256, (2,2), activation = 'relu', strides = (2,2))(model_resnet.output)
conv2 = BatchNormalization()(conv2)
pool2 = MaxPooling2D((2,2), padding='same')(conv2)
relu2 = Activation('relu')(pool2)
drop2 = Dropout(rate = 0.5)(relu2)
flattened2 = Reshape((768,))(drop2)
b = pool2.shape[1]
n_partitions = 3
delta = b // n_partitions
partitions = []
for i in range(1, 3):
    slice = pool2[:, (i-1)*delta:i*delta, :]
    partitions.append(slice)
p1 = partitions[0]
p1 = BatchNormalization()(p1)
p1 = MaxPooling2D((2,2), padding='same')(p1)
p1 = Activation('relu')(p1)
p1 = Dropout(rate = 0.5)(p1)
p1 = Reshape((256,))(p1)
p2 = partitions[1]
p2 = BatchNormalization()(p2)
p2 = MaxPooling2D((2,2), padding='same')(p2)
p2 = Activation('relu')(p2)
p2 = Dropout(rate = 0.5)(p2)
p2 = Reshape((256,))(p2)
conv3 = Conv2D(256, (2,2), activation = 'relu', strides = (1,1))(model_resnet.output)
conv3 = BatchNormalization()(conv3)
pool3 = MaxPooling2D((2,2), padding='same')(conv3)
relu3 = Activation('relu')(pool3)
drop3 = Dropout(rate = 0.5)(relu3)
flattened3 = Reshape((3072,))(drop3)
b = pool3.shape[1]
n_partitions = 3
delta = b // n_partitions
partitions = []
for i in range(1, 4):
    slice = pool3[:, (i-1)*delta:i*delta, :]
    partitions.append(slice)
p3 = partitions[0]
p3 = BatchNormalization()(p3)
p3 = MaxPooling2D((2,2), padding='same')(p3)
p3 = Activation('relu')(p3)
p3 = Dropout(rate = 0.2)(p3)
p3 = Reshape((256,))(p3)
p4 = partitions[1]
p4 = BatchNormalization()(p4)
p4 = MaxPooling2D((2,2), padding='same')(p4)
p4 = Activation('relu')(p4)
p4 = Dropout(rate = 0.2)(p4)
p4 = Reshape((256,))(p4)
p5 = partitions[2]
p5 = BatchNormalization()(p5)
p5 = MaxPooling2D((2,2), padding='same')(p5)
p5 = Activation('relu')(p5)
p5 = Dropout(rate = 0.2)(p5)
p5 = Reshape((256,))(p5)
F = concatenate([flattened1, flattened2, flattened3, p1, p2, p3, p4, p5], axis=1)
x4 = Dense(units=751, activation='softmax', name='fc8', kernel_initializer=RandomNormal(mean=0.0, stddev=0.001))(F)
model = Model(inputs=model_resnet.input, outputs=x4)
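A minimal sketch of the Lambda fix for the partition loops above: the raw slice pool2[:, (i-1)*delta:i*delta, :] is a bare backend op with no layer history, which is exactly what triggers the '_inbound_nodes' error. Wrapping each slice in a Lambda keeps the graph traceable (delta is assumed here to be a plain Python int, e.g. delta = int(pool2.shape[1]) // n_partitions):
from keras.layers import Lambda

partitions = []
for i in range(1, n_partitions + 1):
    lo, hi = (i - 1) * delta, i * delta
    # default args pin lo/hi per iteration instead of late-binding the loop variable
    part = Lambda(lambda x, lo=lo, hi=hi: x[:, lo:hi, :])(pool2)
    partitions.append(part)
# apply the same pattern to the pool3 loop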