Open
Description
Elasticsearch version: 6.3.2
Elasticsearch won't allow me to use the length token filter in a custom normalizer.
Steps to reproduce:
PUT mytest001
{
"settings": {
"analysis": {
"normalizer": {
"md5normalizer":{
"type": "custom",
"filter": ["uppercase","md5length"],
"char_filter": ["hex_chars"]
}
},
"char_filter": {
"hex_chars": {
"type": "pattern_replace",
"pattern": "[^0-9a-fA-F]",
"replacement": ""
}
},
"filter": {
"md5length":{
"type": "length",
"max": 32,
"min": 32
}
}
}
},
"mappings": {
"type": {
"properties": {
"foo": {
"type": "keyword",
"normalizer": "md5normalizer"
}
}
}
}
}
Elasticsearch returns:
{
"error": {
"root_cause": [
{
"type": "remote_transport_exception",
"reason": "[node1][192.168.0.1:9300][indices:admin/create]"
}
],
"type": "illegal_argument_exception",
"reason": "Custom normalizer [md5normalizer] may not use filter [md5length]"
},
"status": 400
}