### YamlMime:JavaType
uid: "com.azure.search.models.NGramTokenizer"
fullName: "com.azure.search.models.NGramTokenizer"
name: "NGramTokenizer"
nameWithType: "NGramTokenizer"
summary: "Tokenizes the input into n-grams of the given size(s)."
inheritances:
- "java.lang.Object"
- "com.azure.search.models.Tokenizer"
inheritedClassMethods:
- classRef: "com.azure.search.models.Tokenizer"
  methodsRef:
  - "getName"
  - "setName"
- classRef: "java.lang.Object"
methodsRef:
- "clone"
- "equals"
- "finalize"
- "getClass"
- "hashCode"
- "notify"
- "notifyAll"
- "toString"
- "wait"
- "wait"
- "wait"
syntax: "public final class **NGramTokenizer** extends Tokenizer"
constructors:
- uid: "com.azure.search.models.NGramTokenizer.NGramTokenizer()"
fullName: "com.azure.search.models.NGramTokenizer.NGramTokenizer()"
name: "NGramTokenizer()"
nameWithType: "NGramTokenizer.NGramTokenizer()"
syntax: "public NGramTokenizer()"
methods:
- uid: "com.azure.search.models.NGramTokenizer.getMaxGram()"
fullName: "com.azure.search.models.NGramTokenizer.getMaxGram()"
name: "getMaxGram()"
nameWithType: "NGramTokenizer.getMaxGram()"
summary: "Get the maxGram property: The maximum n-gram length."
syntax: "public Integer getMaxGram()"
desc: "Get the maxGram property: The maximum n-gram length. Default is 2. Maximum is 300."
returns:
description: "the maxGram value."
type: "Integer"
- uid: "com.azure.search.models.NGramTokenizer.getMinGram()"
fullName: "com.azure.search.models.NGramTokenizer.getMinGram()"
name: "getMinGram()"
nameWithType: "NGramTokenizer.getMinGram()"
summary: "Get the minGram property: The minimum n-gram length."
syntax: "public Integer getMinGram()"
desc: "Get the minGram property: The minimum n-gram length. Default is 1. Maximum is 300. Must be less than the value of maxGram."
returns:
description: "the minGram value."
type: "Integer"
- uid: "com.azure.search.models.NGramTokenizer.getTokenChars()"
fullName: "com.azure.search.models.NGramTokenizer.getTokenChars()"
name: "getTokenChars()"
nameWithType: "NGramTokenizer.getTokenChars()"
summary: "Get the tokenChars property: Character classes to keep in the tokens."
  syntax: "public List<TokenCharacterKind> getTokenChars()"
desc: "Get the tokenChars property: Character classes to keep in the tokens."
returns:
description: "the tokenChars value."
    type: "List<TokenCharacterKind>"
- uid: "com.azure.search.models.NGramTokenizer.setMaxGram(java.lang.Integer)"
fullName: "com.azure.search.models.NGramTokenizer.setMaxGram(Integer maxGram)"
name: "setMaxGram(Integer maxGram)"
nameWithType: "NGramTokenizer.setMaxGram(Integer maxGram)"
summary: "Set the maxGram property: The maximum n-gram length."
parameters:
- description: "the maxGram value to set."
name: "maxGram"
type: "Integer"
syntax: "public NGramTokenizer setMaxGram(Integer maxGram)"
desc: "Set the maxGram property: The maximum n-gram length. Default is 2. Maximum is 300."
returns:
description: "the NGramTokenizer object itself."
    type: "NGramTokenizer"
- uid: "com.azure.search.models.NGramTokenizer.setMinGram(java.lang.Integer)"
fullName: "com.azure.search.models.NGramTokenizer.setMinGram(Integer minGram)"
name: "setMinGram(Integer minGram)"
nameWithType: "NGramTokenizer.setMinGram(Integer minGram)"
summary: "Set the minGram property: The minimum n-gram length."
parameters:
- description: "the minGram value to set."
name: "minGram"
type: "Integer"
syntax: "public NGramTokenizer setMinGram(Integer minGram)"
desc: "Set the minGram property: The minimum n-gram length. Default is 1. Maximum is 300. Must be less than the value of maxGram."
returns:
description: "the NGramTokenizer object itself."
    type: "NGramTokenizer"
- uid: "com.azure.search.models.NGramTokenizer.setTokenChars(java.util.List)"
fullName: "com.azure.search.models.NGramTokenizer.setTokenChars(List tokenChars)"
name: "setTokenChars(List tokenChars)"
nameWithType: "NGramTokenizer.setTokenChars(List tokenChars)"
summary: "Set the tokenChars property: Character classes to keep in the tokens."
parameters:
- description: "the tokenChars value to set."
name: "tokenChars"
    type: "List<TokenCharacterKind>"
  syntax: "public NGramTokenizer setTokenChars(List<TokenCharacterKind> tokenChars)"
desc: "Set the tokenChars property: Character classes to keep in the tokens."
returns:
description: "the NGramTokenizer object itself."
    type: "NGramTokenizer"
type: "class"
desc: "Tokenizes the input into n-grams of the given size(s). This tokenizer is implemented using Apache Lucene."
metadata: {}
package: "com.azure.search.models"
artifact: com.azure:azure-search:11.0.0-beta.1