I downloaded the CelebA dataset and extracted it to my hard drive. It's in a folder structure like the one below.
dataset_directory
+- celeba
   +- img_align_celeba
      +- 000001.jpg
      +- 000002.jpg
      +- 000003.jpg
      +- ...
I also have the annotation files, in both TXT and CSV format.
What's the best way to load this as a dataset? The only way I've loaded data before is directly from TensorFlow with something like the code below, but that won't work in this case:
(x_train, y_train), (x_test, y_test) = fashion_mnist.load_data()
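I was wondering whether something along these lines would be the right direction: read the labels from the CSV with pandas and build a tf.data pipeline that decodes the images on the fly. This is just a rough sketch of what I was considering, and the file name, the "image_id" column, and the "Smiling" attribute are guesses based on my local copy of the annotations:

# Rough sketch (assumptions: list_attr_celeba.csv exists with an "image_id"
# column and attribute columns such as "Smiling" with values 1 / -1)
import pandas as pd
import tensorflow as tf

IMG_DIR = "dataset_directory/celeba/img_align_celeba"
attrs = pd.read_csv("dataset_directory/celeba/list_attr_celeba.csv")

file_paths = IMG_DIR + "/" + attrs["image_id"]        # e.g. .../000001.jpg
labels = attrs["Smiling"].replace(-1, 0).values       # pick one attribute as the label

def load_image(path, label):
    # read and decode a single JPEG, then resize and scale to [0, 1]
    img = tf.io.read_file(path)
    img = tf.image.decode_jpeg(img, channels=3)
    img = tf.image.resize(img, (218, 178)) / 255.0
    return img, label

ds = tf.data.Dataset.from_tensor_slices((file_paths.values, labels))
ds = ds.map(load_image).batch(32)

Is that a reasonable approach, or is there a better/standard way to do it?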
I am hoping to build a model using the function below
from tensorflow import keras
from tensorflow.keras.datasets import fashion_mnist
from tensorflow.keras.layers import Conv2D, Activation, MaxPooling2D, Flatten, Dense

(x_train, y_train), (x_test, y_test) = fashion_mnist.load_data()
x_train = x_train.reshape(-1, 28, 28, 1)
x_test = x_test.reshape(-1, 28, 28, 1)

def build_model(hp):  # random search passes this hyperparameters object
    model = keras.models.Sequential()

    # first conv layer: number of filters is tuned between 32 and 256
    model.add(Conv2D(hp.Int("input_units", min_value=32, max_value=256, step=32),
                     (3, 3), input_shape=x_train.shape[1:]))
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))

    # add a tuned number of additional conv layers
    for i in range(hp.Int("n_layers", min_value=1, max_value=4, step=1)):
        model.add(Conv2D(hp.Int(f"conv_{i}_units", min_value=32, max_value=256, step=32), (3, 3)))
        model.add(Activation('relu'))
        # model.add(MaxPooling2D(pool_size=(2, 2)))

    model.add(Flatten())  # this converts our 3D feature maps to 1D feature vectors
    model.add(Dense(10))
    model.add(Activation("softmax"))

    model.compile(optimizer="adam",
                  loss="sparse_categorical_crossentropy",
                  metrics=["accuracy"])
    return model
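For context, this is roughly how I plan to run the search (assuming Keras Tuner; the x_train/y_train here would come from however the CelebA data ends up being loaded, and tuner.search should also accept a tf.data.Dataset in place of arrays):

# Sketch of the planned tuning loop (assumes the keras_tuner package)
from keras_tuner import RandomSearch

tuner = RandomSearch(
    build_model,
    objective="val_accuracy",
    max_trials=10,
    directory="tuner_logs",
    project_name="celeba_cnn",
)
tuner.search(x_train, y_train, epochs=3, validation_data=(x_test, y_test))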
I would appreciate any help.