I am trying to design analyzers for Tokenization, Case folding, Stopword removal, Word N-gram extraction and Stemming, however after writing the code it keeps popping up errors. Index name is articles and type name is article. I already tried deleting and recreating the index severally, iam very new to elasticsearch so my codes may be very wrong, please help out.
curl -H "Content-Type: application/json" -XPUT http://localhost:9200/articles/_mapping/article -d '{
"settings": {
"analysis": {
"analyzer": {
"content_tokenizer": {
"type": "standard",
"tokenizer": "standard"
},
"content_case_folding": {
"type": "custom",
"tokenizer": "standard",
"filter": [
"lowercase"
]
},
"content_stopword_removal": {
"type": "custom",
"tokenizer": "standard",
"filter": [
"lowercase",
"stop"
]
},
"content_ngram_extraction": {
"type": "custom",
"tokenizer": "standard",
"filter": [
"lowercase",
"stop",
"ngram"
]
},
"content_stemmer": {
"type": "custom",
"tokenizer": "standard",
"filter": [
"lowercase",
"stop",
"ngram",
"stemmer"
]
}
},
"filter": {
"ngram": {
"type": "ngram",
"min_gram": 2,
"max_gram": 5
},
"stop": {
"type": "stop",
"stopwords": "_english_"
},
"stemmer": {
"type": "stemmer",
"language": "english"
}
}
}
},
"mappings": {
"properties": {
"content": {
"type": "text",
"fields": {
"tokenized": {
"type": "text",
"analyzer": "content_tokenizer"
},
"case_folded": {
"type": "text",
"analyzer": "content_case_folding"
},
"without_stopwords": {
"type": "text",
"analyzer": "content_stopword_removal"
},
"ngrammed": {
"type": "text",
"analyzer": "content_ngram_extraction"
},
"stemmed": {
"type": "text",
"analyzer": "content_stemmer"
}
}
}
}
}
}'
It keeps bringing this error
{"error":{"root_cause":[{"type":"mapper_parsing_exception","reason":"Root mapping definition has unsupported parameters: [settings : {analysis={analyzer={content_tokenizer={type=standard, tokenizer=standard}, content_case_folding={type=custom, tokenizer=standard, filter=[lowercase]}, content_stopword_removal={type=custom, tokenizer=standard, filter=[lowercase, stop]}, content_ngram_extraction={type=custom, tokenizer=standard, filter=[lowercase, stop, ngram]}, content_stemmer={type=custom, tokenizer=standard, filter=[lowercase, stop, ngram, stemmer]}}, filter={ngram={type=ngram, min_gram=2, max_gram=5}, stop={type=stop, stopwords=english}, stemmer={type=stemmer, language=english}}}}] [mappings : {properties={content={type=text, fields={tokenized={type=text, analyzer=content_tokenizer}, case_folded={type=text, analyzer=content_case_folding}, without_stopwords={type=text, analyzer=content_stopword_removal}, ngrammed={type=text, analyzer=content_ngram_extraction}, stemmed={type=text, analyzer=content_stemmer}}}}}]"}],"type":"mapper_parsing_exception","reason":"Root mapping definition has unsupported parameters: [settings : {analysis={analyzer={content_tokenizer={type=standard, tokenizer=standard}, content_case_folding={type=custom, tokenizer=standard, filter=[lowercase]}, content_stopword_removal={type=custom, tokenizer=standard, filter=[lowercase, stop]}, content_ngram_extraction={type=custom, tokenizer=standard, filter=[lowercase, stop, ngram]}, content_stemmer={type=custom, tokenizer=standard, filter=[lowercase, stop, ngram, stemmer]}}, filter={ngram={type=ngram, min_gram=2, max_gram=5}, stop={type=stop, stopwords=english}, stemmer={type=stemmer, language=english}}}}] [mappings : {properties={content={type=text, fields={tokenized={type=text, analyzer=content_tokenizer}, case_folded={type=text, analyzer=content_case_folding}, without_stopwords={type=text, analyzer=content_stopword_removal}, ngrammed={type=text, analyzer=content_ngram_extraction}, stemmed={type=text, analyzer=content_stemmer}}}}}]"},"status":400}