@inqueue
Created November 10, 2017 18:51
PUT names
{
  "settings": {
    "analysis": {
      "analyzer": {
        "my_analyzer": {
          "tokenizer": "my_tokenizer"
        }
      },
      "tokenizer": {
        "my_tokenizer": {
          "type": "ngram",
          "min_gram": 3,
          "max_gram": 8
        }
      }
    }
  }
}
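
The settings above only register the analyzer; a field has to reference it in the index mapping before indexed documents are tokenized with it. A minimal sketch, assuming the typeless mapping syntax of Elasticsearch 7.x+ and a hypothetical name field (older versions take a mapping type in the path, and 7.x+ may also need index.max_ngram_diff raised to at least 5, since the default caps the spread between min_gram and max_gram at 1):

PUT names/_mapping
{
  "properties": {
    "name": {
      "type": "text",
      "analyzer": "my_analyzer"
    }
  }
}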
inqueue commented Nov 10, 2017

POST names/_analyze
{
  "analyzer": "my_analyzer",
  "text": "Zachary"
}
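
Analyzing the string "Zachary" with my_analyzer returns every 3- to 8-character gram: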
{
  "tokens": [
    {
      "token": "Zac",
      "start_offset": 0,
      "end_offset": 3,
      "type": "word",
      "position": 0
    },
    {
      "token": "Zach",
      "start_offset": 0,
      "end_offset": 4,
      "type": "word",
      "position": 1
    },
    {
      "token": "Zacha",
      "start_offset": 0,
      "end_offset": 5,
      "type": "word",
      "position": 2
    },
    {
      "token": "Zachar",
      "start_offset": 0,
      "end_offset": 6,
      "type": "word",
      "position": 3
    },
    {
      "token": "Zachary",
      "start_offset": 0,
      "end_offset": 7,
      "type": "word",
      "position": 4
    },
    {
      "token": "ach",
      "start_offset": 1,
      "end_offset": 4,
      "type": "word",
      "position": 5
    },
    {
      "token": "acha",
      "start_offset": 1,
      "end_offset": 5,
      "type": "word",
      "position": 6
    },
    {
      "token": "achar",
      "start_offset": 1,
      "end_offset": 6,
      "type": "word",
      "position": 7
    },
    {
      "token": "achary",
      "start_offset": 1,
      "end_offset": 7,
      "type": "word",
      "position": 8
    },
    {
      "token": "cha",
      "start_offset": 2,
      "end_offset": 5,
      "type": "word",
      "position": 9
    },
    {
      "token": "char",
      "start_offset": 2,
      "end_offset": 6,
      "type": "word",
      "position": 10
    },
    {
      "token": "chary",
      "start_offset": 2,
      "end_offset": 7,
      "type": "word",
      "position": 11
    },
    {
      "token": "har",
      "start_offset": 3,
      "end_offset": 6,
      "type": "word",
      "position": 12
    },
    {
      "token": "hary",
      "start_offset": 3,
      "end_offset": 7,
      "type": "word",
      "position": 13
    },
    {
      "token": "ary",
      "start_offset": 4,
      "end_offset": 7,
      "type": "word",
      "position": 14
    }
  ]
}
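
Because every 3- to 8-character substring of the field value is indexed as its own token, a partial fragment can match the full name. A hedged sketch, assuming the hypothetical name mapping shown earlier (the document ID and the query text "achar" are made up for illustration); note that by default a match query runs the same ngram analyzer over the query string, so longer queries match on any shared gram unless a separate search_analyzer is configured:

PUT names/_doc/1
{
  "name": "Zachary"
}

GET names/_search
{
  "query": {
    "match": {
      "name": "achar"
    }
  }
}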
