1

How can I solve this issue?

from nltk.sentiment.util import demo_sent_subjectivity
sentence='I like her shoes'
demo_sent_subjectivity(sentence)

NameError: name 'save_file' is not defined

demo_sent_subjectivity(text) is a function in the nltk.sentiment.util module, which in turn calls another function, demo_subjectivity(trainer, save_analyzer=False, n_instances=None, output=None).

These methods are below:

def demo_sent_subjectivity(text):
    """
    Classify a single sentence as subjective or objective using a stored
    SentimentAnalyzer.

    :param text: a sentence whose subjectivity has to be classified.
    """
    from nltk.classify import NaiveBayesClassifier
    from nltk.tokenize import regexp

    word_tokenizer = regexp.WhitespaceTokenizer()
    try:
        # `load` is a module-level import elsewhere in nltk.sentiment.util
        # (nltk.data.load); it raises LookupError when the pickled analyzer
        # cannot be found.
        sentim_analyzer = load("sa_subjectivity.pickle")
    except LookupError:
        print("Cannot find the sentiment analyzer you want to load.")
        print("Training a new one using NaiveBayesClassifier.")
        # Train a fresh analyzer and persist it (save_analyzer=True) so the
        # next call can load it from disk instead of retraining.
        sentim_analyzer = demo_subjectivity(NaiveBayesClassifier.train, True)

    # Tokenize and convert to lower case
    tokenized_text = [word.lower() for word in word_tokenizer.tokenize(text)]
    print(sentim_analyzer.classify(tokenized_text))


def demo_subjectivity(trainer, save_analyzer=False, n_instances=None, output=None):
    """
    Train and evaluate a subjectivity classifier on the NLTK subjectivity
    corpus, optionally persisting the trained analyzer.

    :param trainer: the ``train`` method of a classifier, e.g.
        ``NaiveBayesClassifier.train``.
    :param save_analyzer: if True, store the trained SentimentAnalyzer in
        ``sa_subjectivity.pickle`` via the module-level ``save_file`` helper
        (defined elsewhere in nltk.sentiment.util).
    :param n_instances: total number of documents to use, split evenly
        between subjective and objective; ``None`` uses the whole corpus.
    :param output: optional path for a markdown report of the results.
    :return: the trained SentimentAnalyzer.
    """
    from nltk.sentiment import SentimentAnalyzer
    from nltk.corpus import subjectivity

    if n_instances is not None:
        # Half of the requested instances come from each category.
        n_instances = int(n_instances / 2)

    subj_docs = [
        (sent, "subj")
        for sent in subjectivity.sents(categories="subj")[:n_instances]
    ]
    obj_docs = [
        (sent, "obj")
        for sent in subjectivity.sents(categories="obj")[:n_instances]
    ]

    train_subj_docs, test_subj_docs = split_train_test(subj_docs)
    train_obj_docs, test_obj_docs = split_train_test(obj_docs)

    training_docs = train_subj_docs + train_obj_docs
    testing_docs = test_subj_docs + test_obj_docs

    sentim_analyzer = SentimentAnalyzer()
    # Mark negation scope in the training docs before collecting vocabulary.
    all_words_neg = sentim_analyzer.all_words(
        [mark_negation(doc) for doc in training_docs]
    )

    unigram_feats = sentim_analyzer.unigram_word_feats(all_words_neg, min_freq=4)
    sentim_analyzer.add_feat_extractor(extract_unigram_feats, unigrams=unigram_feats)

    training_set = sentim_analyzer.apply_features(training_docs)
    test_set = sentim_analyzer.apply_features(testing_docs)

    classifier = sentim_analyzer.train(trainer, training_set)
    try:
        classifier.show_most_informative_features()
    except AttributeError:
        # Not every classifier type exposes this inspection method.
        print(
            "Your classifier does not provide a "
            "show_most_informative_features() method."
        )
    results = sentim_analyzer.evaluate(test_set)

    if save_analyzer:  # idiomatic truth test instead of `== True`
        save_file(sentim_analyzer, "sa_subjectivity.pickle")

    if output:
        extr = [f.__name__ for f in sentim_analyzer.feat_extractors]
        output_markdown(
            output,
            Dataset="subjectivity",
            Classifier=type(classifier).__name__,
            Tokenizer="WhitespaceTokenizer",
            Feats=extr,
            Instances=n_instances,
            Results=results,
        )

    return sentim_analyzer

The save_file function is called inside the demo_subjectivity function, but I do not understand where its source code is. I noticed that a save_file method exists on the SentimentAnalyzer class, but why is it called here as a bare save_file rather than as sentim_analyzer.save_file?

Aigerim Sadir
  • 353
  • 5
  • 18
  • Name error just means that you're trying to use a variable that you haven't defined. Exactly how you are getting this error is difficult to work out as `save_file` doesn't appear in the code you have supplied. Can you please provide a [Minimal, Complete, Reproducible Example](https://stackoverflow.com/help/minimal-reproducible-example) of the code that's causing your error and the full traceback of the error. – David Buck Apr 19 '20 at 19:19
  • Thank you very much for your edits! I added the source code of those methods – Aigerim Sadir Apr 20 '20 at 08:02

0 Answers