I have a simple Keras model. After saving the model, I am unable to load it back. This is the error I get when I try to load the saved model with load_model:
Using TensorFlow backend.
Traceback (most recent call last):
  File "test.py", line 4, in <module>
    model = load_model("test.h5")
  File "/usr/lib/python3.7/site-packages/keras/engine/saving.py", line 419, in load_model
    model = _deserialize_model(f, custom_objects, compile)
  File "/usr/lib/python3.7/site-packages/keras/engine/saving.py", line 258, in _deserialize_model
    .format(len(layer_names), len(filtered_layers))
ValueError: You are trying to load a weight file containing 6 layers into a model with 0 layers
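For reference, the loading script (test.py) is essentially the following; the file itself isn't shown above, so this is reconstructed from the traceback and may differ slightly from the real one:

from keras.models import load_model

# test.py -- first attempt: load the full saved model
model = load_model("test.h5")   # line 4: raises the ValueError shown above
print(model.summary())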
When I instead instantiate the model, load the weights with model.load_weights, and then try to print a model summary, print(model) gives None and model.summary() fails:
Traceback (most recent call last):
File "test.py", line 7, in <module>print(model.summary())
AttributeError: 'NoneType' object has no attribute 'summary'
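The weights-only variant looks roughly like this (again reconstructed from the description and traceback, so the exact contents of test.py are an assumption):

from model import create_model

# test.py -- second attempt: rebuild the architecture and load only the weights
model = create_model()
model.load_weights("test_weights.h5")
print(model)             # this is where I see None
print(model.summary())   # line 7: raises the AttributeError shown above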
Here is my network:
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, InputLayer, Flatten, Dense, BatchNormalization


def create_model():
    kernel_size = 5
    pool_size = 2
    batchsize = 64
    model = Sequential()
    model.add(InputLayer((36, 120, 1)))
    model.add(Conv2D(filters=20, kernel_size=kernel_size, activation='relu', padding='same'))
    model.add(BatchNormalization())
    model.add(MaxPooling2D(pool_size))
    model.add(Conv2D(filters=50, kernel_size=kernel_size, activation='relu', padding='same'))
    model.add(BatchNormalization())
    model.add(MaxPooling2D(pool_size))
    model.add(Flatten())
    model.add(Dense(120, activation='relu'))
    model.add(Dense(2, activation='relu'))
    return model
Training procedure script:
import numpy as np
from keras import optimizers
from keras import losses
from sklearn.model_selection import train_test_split
from model import create_model


def data_loader(images, pos):
    # yields batches of 64 samples, looping over the data indefinitely
    while True:
        for i in range(0, images.shape[0], 64):
            if (i + 64) < images.shape[0]:
                img_batch = images[i:i + 64]
                pos_batch = pos[i:i + 64]
                yield img_batch, pos_batch
            else:
                img_batch = images[i:]
                pos_batch = pos[i:]
                yield img_batch, pos_batch


def main():
    model = create_model()
    sgd = optimizers.Adadelta(lr=0.01, rho=0.95, epsilon=None, decay=0.0)
    model.compile(loss=losses.mean_squared_error, optimizer=sgd)
    print("training")
    data = np.load("data.npz")
    images = data['images']
    pos = data['pos']
    x_train, x_test, y_train, y_test = train_test_split(images, pos, test_size=0.33, random_state=42)
    model.fit_generator(data_loader(x_train, y_train), steps_per_epoch=x_train.shape[0] // 64,
                        validation_data=data_loader(x_test, y_test),
                        validation_steps=x_test.shape[0] // 64, epochs=1)
    model.save('test.h5')
    model.save_weights('test_weights.h5')
    print("training done")


if __name__ == '__main__':
    main()