### YamlMime:JavaType
uid: "com.azure.search.models.KeywordTokenizerV2"
fullName: "com.azure.search.models.KeywordTokenizerV2"
name: "KeywordTokenizerV2"
nameWithType: "KeywordTokenizerV2"
summary: "Emits the entire input as a single token."
inheritances:
- "java.lang.Object"
- "com.azure.search.models.Tokenizer"
inheritedClassMethods:
- classRef: "com.azure.search.models.Tokenizer"
  methodsRef:
  - "getName"
  - "setName"
- classRef: "java.lang.Object"
methodsRef:
- "clone"
- "equals"
- "finalize"
- "getClass"
- "hashCode"
- "notify"
- "notifyAll"
- "toString"
- "wait"
- "wait"
- "wait"
syntax: "public final class **KeywordTokenizerV2** extends Tokenizer"
constructors:
- uid: "com.azure.search.models.KeywordTokenizerV2.KeywordTokenizerV2()"
fullName: "com.azure.search.models.KeywordTokenizerV2.KeywordTokenizerV2()"
name: "KeywordTokenizerV2()"
nameWithType: "KeywordTokenizerV2.KeywordTokenizerV2()"
syntax: "public KeywordTokenizerV2()"
methods:
- uid: "com.azure.search.models.KeywordTokenizerV2.getMaxTokenLength()"
fullName: "com.azure.search.models.KeywordTokenizerV2.getMaxTokenLength()"
name: "getMaxTokenLength()"
nameWithType: "KeywordTokenizerV2.getMaxTokenLength()"
summary: "Get the maxTokenLength property: The maximum token length."
syntax: "public Integer getMaxTokenLength()"
desc: "Get the maxTokenLength property: The maximum token length. Default is 256. Tokens longer than the maximum length are split. The maximum token length that can be used is 300 characters."
returns:
description: "the maxTokenLength value."
type: "Integer"
- uid: "com.azure.search.models.KeywordTokenizerV2.setMaxTokenLength(java.lang.Integer)"
fullName: "com.azure.search.models.KeywordTokenizerV2.setMaxTokenLength(Integer maxTokenLength)"
name: "setMaxTokenLength(Integer maxTokenLength)"
nameWithType: "KeywordTokenizerV2.setMaxTokenLength(Integer maxTokenLength)"
summary: "Set the maxTokenLength property: The maximum token length."
parameters:
- description: "the maxTokenLength value to set."
name: "maxTokenLength"
type: "Integer"
syntax: "public KeywordTokenizerV2 setMaxTokenLength(Integer maxTokenLength)"
desc: "Set the maxTokenLength property: The maximum token length. Default is 256. Tokens longer than the maximum length are split. The maximum token length that can be used is 300 characters."
returns:
description: "the KeywordTokenizerV2 object itself."
type: "KeywordTokenizerV2"
type: "class"
desc: "Emits the entire input as a single token. This tokenizer is implemented using Apache Lucene."
metadata: {}
package: "com.azure.search.models"
artifact: "com.azure:azure-search:11.0.0-beta.1"