Can anyone help me solve this error: AttributeError: 'Adam' object has no attribute 'step'? It occurs while training a BERT model for NER. I have not been able to fix it; perhaps it is caused by a library version change, since it is raised at the parameter-update call optimizer.step().
import torch
from tqdm import trange

from transformers import AdamW
from transformers import get_linear_schedule_with_warmup
from transformers import BertTokenizer, BertForSequenceClassification
from transformers import DataCollatorForTokenClassification
from transformers import AutoModelForTokenClassification
epochs = 5
max_grad_norm = 1.0

for _ in trange(epochs, desc="Epoch"):
    # TRAIN loop
    model.train()
    tr_loss = 0
    nb_tr_examples, nb_tr_steps = 0, 0
    for step, batch in enumerate(train_dataloader):
        # add batch to gpu
        batch = tuple(t.to(device) for t in batch)
        b_input_ids, b_input_mask, b_labels = batch
        # forward pass
        token_classifier_output = model(b_input_ids,
                                        token_type_ids=None,
                                        attention_mask=b_input_mask,
                                        labels=b_labels)
        token_classifier_output.loss.backward()
        # track train loss
        tr_loss += token_classifier_output.loss.item()
        nb_tr_examples += b_input_ids.size(0)
        nb_tr_steps += 1
        # gradient clipping
        torch.nn.utils.clip_grad_norm_(parameters=model.parameters(), max_norm=max_grad_norm)
        # update parameters
        optimizer.step()
        model.zero_grad()
    # print train loss per epoch
    print("Train loss: {}".format(tr_loss / nb_tr_steps))
The failing call and traceback (truncated, as shown in Colab):

     31         # update parameters
---> 32         optimizer.step()

1 frames
/usr/local/lib/python3.8/dist-packages/keras/optimizers/optimizer_v2/optimizer_v2.py in __getattribute__(self, name)
    864     """Overridden to support hyperparameter access."""
    865     try:
--> 866       return super(OptimizerV2, self).__getattribute__(name)
    867     except AttributeError as e:
    868       # Needed to avoid infinite recursion with __setattr__.

AttributeError: 'Adam' object has no attribute 'step'
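The bottom frame of the traceback points at keras/optimizers/optimizer_v2/optimizer_v2.py, so whatever was assigned to optimizer is TensorFlow/Keras's Adam, which has no step() method; step() exists only on PyTorch optimizers, and this training loop is pure PyTorch. The optimizer construction is not shown in the question, but for reference, a minimal sketch of a PyTorch setup that works with this loop would look like the following (the learning rate, eps, and warmup values here are illustrative assumptions, not taken from the question):

import torch
from torch.optim import AdamW
from transformers import get_linear_schedule_with_warmup

# A PyTorch optimizer exposes .step() and .zero_grad() and consumes
# the gradients produced by loss.backward()
optimizer = AdamW(model.parameters(), lr=3e-5, eps=1e-8)

# Optional linear warmup schedule, matching the import in the question;
# if used, call scheduler.step() right after optimizer.step() in the batch loop
num_training_steps = len(train_dataloader) * epochs
scheduler = get_linear_schedule_with_warmup(
    optimizer,
    num_warmup_steps=0,
    num_training_steps=num_training_steps,
)

With a torch.optim optimizer (or the AdamW already imported from transformers) in place of Keras's Adam, optimizer.step() resolves normally and the AttributeError should disappear.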