// Index-time analyzer for product documents:
// standard tokenizer + Turkish lowercase, apostrophe, asciifolding and edge-ngram token filters.
// TURKISH_LOWERCASE and ENGRAM_FILTER are the names the custom filters are registered under below.
CustomAnalyzer productIndexAnalyzer = new CustomAnalyzer
{
Tokenizer = "standard",
Filter = new List<string> { TURKISH_LOWERCASE, "apostrophe", "asciifolding", ENGRAM_FILTER }
};
// Create the product index: register the custom token filters and analyzers,
// then map Product (attribute-based mapping) with a completion field on Suggest
// for autocomplete suggestions.
// NOTE(review): always inspect indicesOperationResponse.IsValid / .Acknowledged.
// If index creation is rejected (e.g. an invalid analyzer definition), the index
// does not get this mapping, and a later indexing request will create the field
// mappings dynamically instead — losing the "completion" type on Suggest.
IIndicesOperationResponse indicesOperationResponse = _elasticClientFactory.Create().CreateIndex(SearchConstants.ProductIndex, c => c
.Analysis(analysis => analysis
.TokenFilters(tf => tf
.Add(TURKISH_LOWERCASE, turkishLowercaseTokenFilter) // custom filter instances defined elsewhere
.Add(ENGRAM_FILTER, edgeNgramTokenFilter))
.Analyzers(a => a
.Add(PRODUCT_SEARCH_ANALYZER, productSearchAnalyzer) // query-time analyzer
.Add(PRODUCT_INDEX_ANALYZER, productIndexAnalyzer)   // index-time analyzer defined above
))
.AddMapping<Product>(m => m.MapFromAttributes().Properties(props => props
.Completion(s => s
.Name(p => p.Suggest)          // maps the Suggest property as a completion field
.MaxInputLength(20)
.Payloads()                    // store payloads alongside suggestions
.PreservePositionIncrements()
.PreserveSeparators()
)
))
);
NEST client mapping result on Elasticsearch:
"suggest": {
"type": "completion",
"analyzer": "simple",
"payloads": true,
"preserve_separators": true,
"preserve_position_increments": true,
"max_input_length": 20
},
Then I changed productIndexAnalyzer, adding a TURKISH_KEYWORDS filter, and indexed the documents with the NEST client as shown below. The suggest field is still configured to be mapped as type "completion".
// Second version of the index analyzer, with "turkish_keywords" appended to the
// filter chain.
// NOTE(review): "turkish_keywords" is not a built-in Elasticsearch 1.x token
// filter name (keyword marking requires a keyword_marker filter registered with
// an explicit keywords list) — TODO confirm against the ES 1.7 analysis docs.
// If CreateIndex rejects this analyzer, the index is later created by dynamic
// mapping when documents are indexed, and "suggest" ends up mapped as a plain
// object with string fields — which matches the distorted mapping shown below.
// Verify the CreateIndex response before indexing.
CustomAnalyzer productIndexAnalyzer = new CustomAnalyzer
{
Tokenizer = "standard",
Filter = new List<string> { TURKISH_LOWERCASE, "apostrophe", "asciifolding", ENGRAM_FILTER, "turkish_keywords" }
};
But when I add the TURKISH_KEYWORDS filter, the mapping of the suggest field is distorted: instead of type "completion" it becomes an object with string fields.
NEST client mapping result on Elasticsearch:
"suggest": {
"properties": {
"input": {
"type": "string"
},
"output": {
"type": "string"
},
"weight": {
"type": "long"
}
}
}
I'm using NEST version 1.7.0 and Elasticsearch version 1.7.3.