I'm getting a ValueError about incompatible tensor shapes in my code. The following is my code:
from tensorflow import keras
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.layers import ReLU,LeakyReLU,ELU,PReLU,Softmax
from tensorflow.keras.layers import Dropout
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.optimizers.legacy import SGD,Adam
from keras_tuner.tuners import RandomSearch
from tensorflow.keras.initializers import he_normal
import random
import numpy as np
import tensorflow as tf
import pandas as pd
from sklearn.metrics import accuracy_score,classification_report,confusion_matrix
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt
seed_value = 42
np.random.seed(seed_value)
random.seed(seed_value)
tf.random.set_seed(seed_value)
df=pd.read_csv('IRIS.csv')
df['species']=df['species'].replace({
    'Iris-setosa':0,
    'Iris-versicolor':1,
    'Iris-virginica':2})
x=df.drop(columns=['species'],axis=1)
y=df['species']
xtrain,xtest,ytrain,ytest=train_test_split(x,y,test_size=0.3,random_state=42)
from sklearn.preprocessing import StandardScaler
sc=StandardScaler()
xtrain_scaled=sc.fit_transform(xtrain)
xtest_scaled=sc.transform(xtest)
ytrain_encoded = to_categorical(ytrain,3)
ytest_encoded=to_categorical(ytest,3)
def build_model(hp):
    model=Sequential()
    units=hp.Int('units',min_value=18,max_value=729,step=9)
    bias_initiliazer=he_normal(seed=None)
    # first hidden layer (input_dim=4 for the four iris features)
    model.add(Dense(units=hp.Int('initial neurons for first layer',min_value=9,max_value=729,step=9,default=9),
                    kernel_initializer='he_normal',
                    use_bias=True,
                    bias_initializer=bias_initiliazer,
                    activation='relu',
                    input_dim=4))
    counter=0
    # additional hidden layers, each followed by dropout
    for i in range(hp.Int('num_layers',min_value=2,max_value=20,step=1)):
        if counter==0:
            model.add(Dense(units=units,
                            kernel_initializer='he_normal',
                            use_bias=True,
                            bias_initializer=bias_initiliazer,
                            activation='relu'))
            model.add(Dropout(hp.Choice('dropout',values=[0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9])))
        else:
            model.add(Dense(units=units,
                            kernel_initializer='he_normal',
                            use_bias=True,
                            bias_initializer=bias_initiliazer,
                            activation='relu'))
            model.add(Dropout(hp.Choice('dropout',values=[0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9])))
        counter+=1
    # output layer for the three iris classes
    model.add(Dense(3,activation='softmax'))
    learning_rate = hp.Float("lr", min_value=1e-4, max_value=1e-2, sampling="log")
    model.compile(optimizer=tf.keras.optimizers.legacy.Adam(learning_rate=learning_rate),
                  loss='categorical_crossentropy',
                  metrics=['accuracy'])
    return model
tuner=RandomSearch(
    hypermodel=build_model,
    objective='val_accuracy',
    max_trials=5,
    seed=42,
    executions_per_trial=5)
tuner.search(xtrain_scaled,ytrain_encoded,
             epochs=5,
             validation_data=(xtest_scaled,ytest_encoded))
main_model=tuner.get_best_models(num_models=1)[0]
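# Note: I believe the ValueError shown further below is raised by the line above,
# when Keras Tuner restores the best trial's checkpointed weights into a rebuilt model.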
model_history=main_model.fit(xtrain_scaled,ytrain_encoded,epochs=100,initial_epoch=11,batch_size=32,
                             validation_data=(xtest_scaled,ytest_encoded),
                             validation_split=0.33)
ypred_test=np.argmax(main_model.predict(xtest_scaled),axis=-1)
ypred_train=np.argmax(main_model.predict(xtrain_scaled),axis=-1)
missclassified=np.sum(ytest != ypred_test)
classified=np.sum(ytest== ypred_test)
print('Count of misclassified samples: ',missclassified)
print('Count of correctly classified samples: ',classified)
print('Classification report for training set as follows:\n',classification_report(ytrain,ypred_train))
print('-'*50)
print('Classification report for testing set as follows:\n',classification_report(ytest,ypred_test))
print('Training set accuracy is {:.2f}'.format(accuracy_score(ytrain,ypred_train)))
print('Testing set accuracy is {:.2f}'.format(accuracy_score(ytest,ypred_test)))
print('Confusion matrix: {}\n'.format(confusion_matrix(ytest,ypred_test)))
fig, axs = plt.subplots(2, 1, figsize=(5, 5))
# Plot Accuracy
axs[0].plot(model_history.history['accuracy'])
axs[0].plot(model_history.history['val_accuracy'])
axs[0].set_title("Accuracy")
axs[0].legend(['train', 'test'])
# Plot Loss
axs[1].plot(model_history.history['loss'])
axs[1].plot(model_history.history['val_loss'])
axs[1].set_title('Loss')
axs[1].legend(['Train', 'Test'])
# Display the subplots
plt.tight_layout()
plt.show()
The error is as follows:
ValueError: Received incompatible tensor with shape (512,) when attempting to restore variable with shape (9,) and name dense/bias:0.
WARNING:tensorflow:Detecting that an object or model or tf.train.Checkpoint is being deleted with unrestored values. See the following logs for the specific values in question. To silence these warnings, use `status.expect_partial()`. See https://www.tensorflow.org/api_docs/python/tf/train/Checkpoint#restorefor details about the status object returned by the restore function.
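From what I understand, get_best_models() rebuilds the model and then restores the weights saved in the best trial's checkpoint, and the shape mismatch seems to happen during that restore step. Below is a minimal sketch of an alternative I am considering, rebuilding the model from the best hyperparameters and retraining it from scratch instead of loading the checkpointed weights; this assumes get_best_hyperparameters() and tuner.hypermodel.build() behave as documented, and I have not verified that it avoids the error:

# Possible workaround (unverified): retrain from the best hyperparameters
# instead of restoring the checkpointed weights that get_best_models() loads.
best_hp = tuner.get_best_hyperparameters(num_trials=1)[0]   # hyperparameters of the best trial
main_model = tuner.hypermodel.build(best_hp)                # fresh, randomly initialized model
model_history = main_model.fit(xtrain_scaled, ytrain_encoded,
                               epochs=100, batch_size=32,
                               validation_data=(xtest_scaled, ytest_encoded))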
One strange thing is that the above code runs perfectly well in a Google Colab notebook, but it does not run on my local machine in Visual Studio Code. The following are the versions I am using for the above code:
TensorFlow: 2.12.0
Keras Tuner: 1.3.5
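For reference, this is how I am checking the versions in both environments (the Colab notebook and my local VS Code setup):

import tensorflow as tf
import keras_tuner
print(tf.__version__)           # 2.12.0 on my local machine
print(keras_tuner.__version__)  # 1.3.5 on my local machine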
Hoping that someone can reply as soon as possible.
Regards,
Guna Sekhar.