
I am trying to analyse an audio file and train a model on the extracted features, but I am getting an error while fitting the model: "tuple index out of range". I have noted the shape of every array I am using in a comment next to its print statement. Could you help me understand how to define the sizes when defining the model?

Please let me know if any more details are required.

import glob
import numpy as np
import pandas as pd
import random
import librosa
import librosa.display
import os
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelBinarizer
from tensorflow.keras.layers import LSTM, Dense, Dropout, Flatten
from tensorflow.keras.models import Sequential
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint

X, sample_rate = librosa.load(r'C:\Users\Sumanth\Desktop\voice\Speaker-275-3.wav', res_type='kaiser_fast')
print(X.shape) # Shape is (439238,)

# extracting MFCC features from the audio signal
mfccs = librosa.feature.mfcc(y=X, sr=sample_rate, n_mfcc=40)
print(mfccs.shape) # Shape is (40, 858)

#manually assigning the label as 275
z = np.asarray(275)

#Validation data
val_x, sample_rate = librosa.load(r'C:\Users\Sumanth\Desktop\voice\Speaker-275-2.wav', res_type='kaiser_fast')
print(val_x.shape) # Shape is (292826,)

val_y=np.asarray(275)

#Building the model
model = Sequential()
model.add(Dense(256, input_shape=(858,),activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(256, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(275,activation='softmax'))
model.compile(loss='categorical_crossentropy', metrics=['accuracy'], optimizer='adam')
#training our model
model.fit(mfccs, z, epochs=5, validation_data=(val_x, val_y))
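
One thing I noticed while debugging: the label z = np.asarray(275) is a zero-dimensional array, so its shape is an empty tuple, which looks related to the index error. A minimal reproduction of what I mean (just NumPy, independent of Keras):

import numpy as np

z = np.asarray(275)
print(z.shape)     # prints () -- an empty shape tuple
print(z.shape[0])  # IndexError: tuple index out of range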

-------------------ERROR------------------------------------------------------


IndexError                                Traceback (most recent call last)
<ipython-input-31-adaf98404d0e> in <module>
     40 model.compile(loss='categorical_crossentropy', metrics=['accuracy'], optimizer='adam')
     41 #training our model
---> 42 model.fit(mfccs, z, epochs=5, validation_data=(val_x, val_y))
     43 
     44 

~\AppData\Roaming\Python\Python37\site-packages\tensorflow_core\python\keras\engine\training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_freq, max_queue_size, workers, use_multiprocessing, **kwargs)
    726         max_queue_size=max_queue_size,
    727         workers=workers,
--> 728         use_multiprocessing=use_multiprocessing)
    729 
    730   def evaluate(self,

~\AppData\Roaming\Python\Python37\site-packages\tensorflow_core\python\keras\engine\training_v2.py in fit(self, model, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, validation_freq, **kwargs)
    222           validation_data=validation_data,
    223           validation_steps=validation_steps,
--> 224           distribution_strategy=strategy)
    225 
    226       total_samples = _get_total_number_of_samples(training_data_adapter)

~\AppData\Roaming\Python\Python37\site-packages\tensorflow_core\python\keras\engine\training_v2.py in _process_training_inputs(model, x, y, batch_size, epochs, sample_weights, class_weights, steps_per_epoch, validation_split, validation_data, validation_steps, shuffle, distribution_strategy, max_queue_size, workers, use_multiprocessing)
    545         max_queue_size=max_queue_size,
    546         workers=workers,
--> 547         use_multiprocessing=use_multiprocessing)
    548     val_adapter = None
    549     if validation_data:

~\AppData\Roaming\Python\Python37\site-packages\tensorflow_core\python\keras\engine\training_v2.py in _process_inputs(model, x, y, batch_size, epochs, sample_weights, class_weights, shuffle, steps, distribution_strategy, max_queue_size, workers, use_multiprocessing)
    592         batch_size=batch_size,
    593         check_steps=False,
--> 594         steps=steps)
    595   adapter = adapter_cls(
    596       x,

~\AppData\Roaming\Python\Python37\site-packages\tensorflow_core\python\keras\engine\training.py in _standardize_user_data(self, x, y, sample_weight, class_weight, batch_size, check_steps, steps_name, steps, validation_split, shuffle, extract_tensors_from_dataset)
   2532       # Check that all arrays have the same length.
   2533       if not self._distribution_strategy:
-> 2534         training_utils.check_array_lengths(x, y, sample_weights)
   2535         if self._is_graph_network and not self.run_eagerly:
   2536           # Additional checks to avoid users mistakenly using improper loss fns.

~\AppData\Roaming\Python\Python37\site-packages\tensorflow_core\python\keras\engine\training_utils.py in check_array_lengths(inputs, targets, weights)
    661 
    662   set_x = set_of_lengths(inputs)
--> 663   set_y = set_of_lengths(targets)
    664   set_w = set_of_lengths(weights)
    665   if len(set_x) > 1:

~\AppData\Roaming\Python\Python37\site-packages\tensorflow_core\python\keras\engine\training_utils.py in set_of_lengths(x)
    656       return set([
    657           y.shape[0]
--> 658           for y in x
    659           if y is not None and not is_tensor_or_composite_tensor(y)
    660       ])

~\AppData\Roaming\Python\Python37\site-packages\tensorflow_core\python\keras\engine\training_utils.py in <listcomp>(.0)
    657           y.shape[0]
    658           for y in x
--> 659           if y is not None and not is_tensor_or_composite_tensor(y)
    660       ])
    661 

IndexError: tuple index out of range
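
Assuming the zero-dimensional labels (and perhaps the orientation of mfccs) are the problem, this is my rough guess at how the arrays and layer sizes would need to be arranged: treating each MFCC frame as one sample, transposing the feature matrix to (frames, 40), and using integer labels with sparse_categorical_crossentropy. The 276 output units are only my assumption so that the label 275 is a valid class index; the sketch continues from the variables defined in the code above.

# continuation of the script above (mfccs, val_x, sample_rate already defined)
x_train = mfccs.T                              # (858, 40): one row per MFCC frame
y_train = np.full(len(x_train), 275)           # one integer speaker label per frame

val_mfccs = librosa.feature.mfcc(y=val_x, sr=sample_rate, n_mfcc=40)
x_val = val_mfccs.T                            # (frames, 40)
y_val = np.full(len(x_val), 275)

model = Sequential()
model.add(Dense(256, input_shape=(40,), activation='relu'))   # 40 MFCC coefficients per frame
model.add(Dropout(0.5))
model.add(Dense(256, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(276, activation='softmax'))    # assumption: 276 classes so label 275 is a valid index
model.compile(loss='sparse_categorical_crossentropy',
              metrics=['accuracy'], optimizer='adam')
model.fit(x_train, y_train, epochs=5, validation_data=(x_val, y_val))

Is this the right way to define the input and output sizes, or should the MFCCs be aggregated per file instead of per frame?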