
I'm trying to run code from GitHub. When I run the following function, I get this error:

tensorflow.python.framework.errors_impl.FailedPreconditionError: /var/folders/fh/0rzmw3r97kl4gft689jt2mhw0000gn/T/tmp-kerasmodelyv4lipk1 is not a directory.

import tempfile
import os
import h5py
import keras
from keras.models import load_model, save_model

def load_model_from_hdf5_group(f, custom_objects=None):
    # Copy the embedded 'kerasmodel' group out into a temporary HDF5 file,
    # then read it back with keras.models.load_model.
    tempfd, tempfname = tempfile.mkstemp(prefix='tmp-kerasmodel')
    try:
        os.close(tempfd)
        serialized_model = h5py.File(tempfname, 'w')
        root_item = f.get('kerasmodel')
        for attr_name, attr_value in root_item.attrs.items():
            serialized_model.attrs[attr_name] = attr_value
        for k in root_item.keys():
            f.copy(root_item.get(k), serialized_model, k)
        serialized_model.close()
        return load_model(tempfname, custom_objects=custom_objects)
    finally:
        os.unlink(tempfname)
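
In case it helps, this is roughly how I end up calling that helper: I open the serialized agent file with h5py and pass the 'model' group in (a minimal sketch of my call site; the path and group name just mirror my serialize() function further below):

import h5py

# Sketch: the file was written by serialize() below, so the embedded Keras
# model sits under the 'model' group (which in turn contains 'kerasmodel').
with h5py.File('./agents/deep_bot.h5', 'r') as h5file:
    model = load_model_from_hdf5_group(h5file['model'])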

The whole situation is that I have a trained Sequential model, and I would like to serialize it with my own (self-defined) serialize function. Running it first prints this TensorFlow warning:

2023-04-05 10:49:01.157321: W tensorflow/python/util/util.cc:368] Sets are not currently considered sequences, but this may change in the future, so consider avoiding using them.

I then stepped through my serialize function line by line; when it reaches kerasutil.save_model_to_hdf5_group(self.model, h5file['model']), I get the path error shown above.
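
For reference, save_model_to_hdf5_group is the counterpart of the load helper above; as far as I can tell from the repo it looks roughly like this (copied from memory, so it may differ slightly): the model is first written to a temporary file with Keras save_model and then copied into the given group as 'kerasmodel'.

def save_model_to_hdf5_group(model, f):
    # Save the full model to a temporary HDF5 file with Keras save_model,
    # then copy its contents into the given h5py group under 'kerasmodel'.
    tempfd, tempfname = tempfile.mkstemp(prefix='tmp-kerasmodel')
    try:
        os.close(tempfd)
        save_model(model, tempfname)
        serialized_model = h5py.File(tempfname, 'r')
        root_item = serialized_model.get('/')
        serialized_model.copy(root_item, f, 'kerasmodel')
        serialized_model.close()
    finally:
        os.unlink(tempfname)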

The whole code is as follows:

from keras.models import Sequential
from keras.layers import Dense
import h5py

model = Sequential()
network_layers = large.layers(input_shape)
for layer in network_layers:
    model.add(layer)
model.add(Dense(nb_classes, activation='softmax'))
model.compile(loss='categorical_crossentropy', optimizer='adadelta', metrics=['accuracy'])

model.fit(X, y, batch_size=128, epochs=20, verbose=1)
deep_learning_bot = DeepLearningAgent(model, encoder)
model.save('./agents/deep_bot.h5')
# serialize() expects an open h5py file object (see the function below), not a path string
with h5py.File('./agents/deep_bot.h5', 'w') as h5file:
    deep_learning_bot.serialize(h5file)

def serialize(self, h5file):
    # Store encoder metadata as attributes on an 'encoder' group.
    h5file.create_group('encoder')
    h5file['encoder'].attrs['name'] = self.encoder.name()
    h5file['encoder'].attrs['board_width'] = self.encoder.board_width
    h5file['encoder'].attrs['board_height'] = self.encoder.board_height
    # Embed the Keras model under a 'model' group.
    h5file.create_group('model')
    kerasutil.save_model_to_hdf5_group(self.model, h5file['model'])
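
For what it's worth, if I read serialize() correctly the resulting file should contain an 'encoder' group carrying the three attributes plus a 'model' group with the embedded Keras model. A quick way to check the layout (just an inspection sketch; the path is from my setup):

import h5py

with h5py.File('./agents/deep_bot.h5', 'r') as f:
    f.visit(print)                      # e.g. encoder, model, model/kerasmodel, ...
    print(dict(f['encoder'].attrs))     # name, board_width, board_height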
