### YamlMime:JavaType
uid: "com.azure.storage.file.datalake.DataLakeFileClient"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient"
name: "DataLakeFileClient"
nameWithType: "DataLakeFileClient"
summary: "This class provides a client that contains file operations for Azure Storage Data Lake."
inheritances:
- "<xref href=\"java.lang.Object?displayProperty=fullName\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient?displayProperty=fullName\" data-throw-if-not-resolved=\"False\" />"
inheritedClassMethods:
- classRef: "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient?alt=com.azure.storage.file.datalake.DataLakePathClient&text=DataLakePathClient\" data-throw-if-not-resolved=\"False\" />"
methodsRef:
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.create()?alt=com.azure.storage.file.datalake.DataLakePathClient.create&text=create\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.create(boolean)?alt=com.azure.storage.file.datalake.DataLakePathClient.create&text=create\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.createIfNotExists()?alt=com.azure.storage.file.datalake.DataLakePathClient.createIfNotExists&text=createIfNotExists\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.createIfNotExistsWithResponse(com.azure.storage.file.datalake.options.DataLakePathCreateOptions,java.time.Duration,com.azure.core.util.Context)?alt=com.azure.storage.file.datalake.DataLakePathClient.createIfNotExistsWithResponse&text=createIfNotExistsWithResponse\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.createWithResponse(com.azure.storage.file.datalake.options.DataLakePathCreateOptions,java.time.Duration,com.azure.core.util.Context)?alt=com.azure.storage.file.datalake.DataLakePathClient.createWithResponse&text=createWithResponse\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.createWithResponse(java.lang.String,java.lang.String,com.azure.storage.file.datalake.models.PathHttpHeaders,java.util.Map<java.lang.String,java.lang.String>,com.azure.storage.file.datalake.models.DataLakeRequestConditions,java.time.Duration,com.azure.core.util.Context)?alt=com.azure.storage.file.datalake.DataLakePathClient.createWithResponse&text=createWithResponse\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.deleteIfExists()?alt=com.azure.storage.file.datalake.DataLakePathClient.deleteIfExists&text=deleteIfExists\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.deleteIfExistsWithResponse(com.azure.storage.file.datalake.options.DataLakePathDeleteOptions,java.time.Duration,com.azure.core.util.Context)?alt=com.azure.storage.file.datalake.DataLakePathClient.deleteIfExistsWithResponse&text=deleteIfExistsWithResponse\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.exists()?alt=com.azure.storage.file.datalake.DataLakePathClient.exists&text=exists\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.existsWithResponse(java.time.Duration,com.azure.core.util.Context)?alt=com.azure.storage.file.datalake.DataLakePathClient.existsWithResponse&text=existsWithResponse\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.generateSas(com.azure.storage.file.datalake.sas.DataLakeServiceSasSignatureValues)?alt=com.azure.storage.file.datalake.DataLakePathClient.generateSas&text=generateSas\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.generateSas(com.azure.storage.file.datalake.sas.DataLakeServiceSasSignatureValues,com.azure.core.util.Context)?alt=com.azure.storage.file.datalake.DataLakePathClient.generateSas&text=generateSas\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.generateUserDelegationSas(com.azure.storage.file.datalake.sas.DataLakeServiceSasSignatureValues,com.azure.storage.file.datalake.models.UserDelegationKey)?alt=com.azure.storage.file.datalake.DataLakePathClient.generateUserDelegationSas&text=generateUserDelegationSas\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.generateUserDelegationSas(com.azure.storage.file.datalake.sas.DataLakeServiceSasSignatureValues,com.azure.storage.file.datalake.models.UserDelegationKey,java.lang.String,com.azure.core.util.Context)?alt=com.azure.storage.file.datalake.DataLakePathClient.generateUserDelegationSas&text=generateUserDelegationSas\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.getAccessControl()?alt=com.azure.storage.file.datalake.DataLakePathClient.getAccessControl&text=getAccessControl\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.getAccessControlWithResponse(boolean,com.azure.storage.file.datalake.models.DataLakeRequestConditions,java.time.Duration,com.azure.core.util.Context)?alt=com.azure.storage.file.datalake.DataLakePathClient.getAccessControlWithResponse&text=getAccessControlWithResponse\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.getAccountName()?alt=com.azure.storage.file.datalake.DataLakePathClient.getAccountName&text=getAccountName\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.getCustomerProvidedKey()?alt=com.azure.storage.file.datalake.DataLakePathClient.getCustomerProvidedKey&text=getCustomerProvidedKey\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.getCustomerProvidedKeyClient(com.azure.storage.file.datalake.models.CustomerProvidedKey)?alt=com.azure.storage.file.datalake.DataLakePathClient.getCustomerProvidedKeyClient&text=getCustomerProvidedKeyClient\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.getFileSystemName()?alt=com.azure.storage.file.datalake.DataLakePathClient.getFileSystemName&text=getFileSystemName\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.getHttpPipeline()?alt=com.azure.storage.file.datalake.DataLakePathClient.getHttpPipeline&text=getHttpPipeline\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.getProperties()?alt=com.azure.storage.file.datalake.DataLakePathClient.getProperties&text=getProperties\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.getProperties(com.azure.storage.file.datalake.options.PathGetPropertiesOptions)?alt=com.azure.storage.file.datalake.DataLakePathClient.getProperties&text=getProperties\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.getPropertiesWithResponse(com.azure.storage.file.datalake.models.DataLakeRequestConditions,java.time.Duration,com.azure.core.util.Context)?alt=com.azure.storage.file.datalake.DataLakePathClient.getPropertiesWithResponse&text=getPropertiesWithResponse\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.getServiceVersion()?alt=com.azure.storage.file.datalake.DataLakePathClient.getServiceVersion&text=getServiceVersion\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.removeAccessControlRecursive(java.util.List<com.azure.storage.file.datalake.models.PathRemoveAccessControlEntry>)?alt=com.azure.storage.file.datalake.DataLakePathClient.removeAccessControlRecursive&text=removeAccessControlRecursive\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.removeAccessControlRecursiveWithResponse(com.azure.storage.file.datalake.options.PathRemoveAccessControlRecursiveOptions,java.time.Duration,com.azure.core.util.Context)?alt=com.azure.storage.file.datalake.DataLakePathClient.removeAccessControlRecursiveWithResponse&text=removeAccessControlRecursiveWithResponse\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.setAccessControlList(java.util.List<com.azure.storage.file.datalake.models.PathAccessControlEntry>,java.lang.String,java.lang.String)?alt=com.azure.storage.file.datalake.DataLakePathClient.setAccessControlList&text=setAccessControlList\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.setAccessControlListWithResponse(java.util.List<com.azure.storage.file.datalake.models.PathAccessControlEntry>,java.lang.String,java.lang.String,com.azure.storage.file.datalake.models.DataLakeRequestConditions,java.time.Duration,com.azure.core.util.Context)?alt=com.azure.storage.file.datalake.DataLakePathClient.setAccessControlListWithResponse&text=setAccessControlListWithResponse\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.setAccessControlRecursive(java.util.List<com.azure.storage.file.datalake.models.PathAccessControlEntry>)?alt=com.azure.storage.file.datalake.DataLakePathClient.setAccessControlRecursive&text=setAccessControlRecursive\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.setAccessControlRecursiveWithResponse(com.azure.storage.file.datalake.options.PathSetAccessControlRecursiveOptions,java.time.Duration,com.azure.core.util.Context)?alt=com.azure.storage.file.datalake.DataLakePathClient.setAccessControlRecursiveWithResponse&text=setAccessControlRecursiveWithResponse\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.setHttpHeaders(com.azure.storage.file.datalake.models.PathHttpHeaders)?alt=com.azure.storage.file.datalake.DataLakePathClient.setHttpHeaders&text=setHttpHeaders\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.setHttpHeadersWithResponse(com.azure.storage.file.datalake.models.PathHttpHeaders,com.azure.storage.file.datalake.models.DataLakeRequestConditions,java.time.Duration,com.azure.core.util.Context)?alt=com.azure.storage.file.datalake.DataLakePathClient.setHttpHeadersWithResponse&text=setHttpHeadersWithResponse\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.setMetadata(java.util.Map<java.lang.String,java.lang.String>)?alt=com.azure.storage.file.datalake.DataLakePathClient.setMetadata&text=setMetadata\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.setMetadataWithResponse(java.util.Map<java.lang.String,java.lang.String>,com.azure.storage.file.datalake.models.DataLakeRequestConditions,java.time.Duration,com.azure.core.util.Context)?alt=com.azure.storage.file.datalake.DataLakePathClient.setMetadataWithResponse&text=setMetadataWithResponse\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.setPermissions(com.azure.storage.file.datalake.models.PathPermissions,java.lang.String,java.lang.String)?alt=com.azure.storage.file.datalake.DataLakePathClient.setPermissions&text=setPermissions\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.setPermissionsWithResponse(com.azure.storage.file.datalake.models.PathPermissions,java.lang.String,java.lang.String,com.azure.storage.file.datalake.models.DataLakeRequestConditions,java.time.Duration,com.azure.core.util.Context)?alt=com.azure.storage.file.datalake.DataLakePathClient.setPermissionsWithResponse&text=setPermissionsWithResponse\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.updateAccessControlRecursive(java.util.List<com.azure.storage.file.datalake.models.PathAccessControlEntry>)?alt=com.azure.storage.file.datalake.DataLakePathClient.updateAccessControlRecursive&text=updateAccessControlRecursive\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.storage.file.datalake.DataLakePathClient.updateAccessControlRecursiveWithResponse(com.azure.storage.file.datalake.options.PathUpdateAccessControlRecursiveOptions,java.time.Duration,com.azure.core.util.Context)?alt=com.azure.storage.file.datalake.DataLakePathClient.updateAccessControlRecursiveWithResponse&text=updateAccessControlRecursiveWithResponse\" data-throw-if-not-resolved=\"False\" />"
- classRef: "java.lang.<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html\">Object</a>"
methodsRef:
- "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#clone--\">clone</a>"
- "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#equals-java.lang.Object-\">equals</a>"
- "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#finalize--\">finalize</a>"
- "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#getClass--\">getClass</a>"
- "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#hashCode--\">hashCode</a>"
- "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#notify--\">notify</a>"
- "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#notifyAll--\">notifyAll</a>"
- "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#toString--\">toString</a>"
- "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#wait--\">wait</a>"
- "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#wait-long-\">wait</a>"
- "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/Object.html#wait-long-int-\">wait</a>"
syntax: "public class **DataLakeFileClient**</br> extends <xref href=\"com.azure.storage.file.datalake.DataLakePathClient?alt=com.azure.storage.file.datalake.DataLakePathClient&text=DataLakePathClient\" data-throw-if-not-resolved=\"False\" />"
methods:
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.append(com.azure.core.util.BinaryData,long)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.append(BinaryData data, long fileOffset)"
name: "append(BinaryData data, long fileOffset)"
nameWithType: "DataLakeFileClient.append(BinaryData data, long fileOffset)"
summary: "Appends data to the specified resource to later be flushed (written) by a call to flush"
parameters:
- description: "The data to write to the file."
name: "data"
type: "<xref href=\"com.azure.core.util.BinaryData?alt=com.azure.core.util.BinaryData&text=BinaryData\" data-throw-if-not-resolved=\"False\" />"
- description: "The position where the data is to be appended."
name: "fileOffset"
type: "<xref href=\"long?alt=long&text=long\" data-throw-if-not-resolved=\"False\" />"
syntax: "public void append(BinaryData data, long fileOffset)"
desc: "Appends data to the specified resource to later be flushed (written) by a call to flush\n\n**Code Samples**\n\n```java\nclient.append(binaryData, offset);\n System.out.println(\"Append data completed\");\n```\n\nFor more information, see the [Azure Docs][]\n\n\n[Azure Docs]: https://docs.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/update"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.append(java.io.InputStream,long,long)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.append(InputStream data, long fileOffset, long length)"
name: "append(InputStream data, long fileOffset, long length)"
nameWithType: "DataLakeFileClient.append(InputStream data, long fileOffset, long length)"
summary: "Appends data to the specified resource to later be flushed (written) by a call to flush"
parameters:
- description: "The data to write to the file."
name: "data"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/io/InputStream.html\">InputStream</a>"
- description: "The position where the data is to be appended."
name: "fileOffset"
type: "<xref href=\"long?alt=long&text=long\" data-throw-if-not-resolved=\"False\" />"
- description: "The exact length of the data."
name: "length"
type: "<xref href=\"long?alt=long&text=long\" data-throw-if-not-resolved=\"False\" />"
syntax: "public void append(InputStream data, long fileOffset, long length)"
desc: "Appends data to the specified resource to later be flushed (written) by a call to flush\n\n**Code Samples**\n\n```java\nclient.append(data, offset, length);\n System.out.println(\"Append data completed\");\n```\n\nFor more information, see the [Azure Docs][]\n\n\n[Azure Docs]: https://docs.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/update"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.appendWithResponse(com.azure.core.util.BinaryData,long,byte[],java.lang.String,java.time.Duration,com.azure.core.util.Context)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.appendWithResponse(BinaryData data, long fileOffset, byte[] contentMd5, String leaseId, Duration timeout, Context context)"
name: "appendWithResponse(BinaryData data, long fileOffset, byte[] contentMd5, String leaseId, Duration timeout, Context context)"
nameWithType: "DataLakeFileClient.appendWithResponse(BinaryData data, long fileOffset, byte[] contentMd5, String leaseId, Duration timeout, Context context)"
summary: "Appends data to the specified resource to later be flushed (written) by a call to flush"
parameters:
- description: "The data to write to the file."
name: "data"
type: "<xref href=\"com.azure.core.util.BinaryData?alt=com.azure.core.util.BinaryData&text=BinaryData\" data-throw-if-not-resolved=\"False\" />"
- description: "The position where the data is to be appended."
name: "fileOffset"
type: "<xref href=\"long?alt=long&text=long\" data-throw-if-not-resolved=\"False\" />"
- description: "An MD5 hash of the content of the data. If specified, the service will calculate the MD5 of the\n received data and fail the request if it does not match the provided MD5."
name: "contentMd5"
type: "<xref href=\"byte?alt=byte&text=byte\" data-throw-if-not-resolved=\"False\" />[]"
- description: "By setting lease id, requests will fail if the provided lease does not match the active lease on\n the file."
name: "leaseId"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/String.html\">String</a>"
- description: "An optional timeout value beyond which a <xref uid=\"\" data-throw-if-not-resolved=\"false\" data-raw-source=\"RuntimeException\"></xref> will be raised."
name: "timeout"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/time/Duration.html\">Duration</a>"
- description: "Additional context that is passed through the Http pipeline during the service call."
name: "context"
type: "<xref href=\"com.azure.core.util.Context?alt=com.azure.core.util.Context&text=Context\" data-throw-if-not-resolved=\"False\" />"
syntax: "public Response<Void> appendWithResponse(BinaryData data, long fileOffset, byte[] contentMd5, String leaseId, Duration timeout, Context context)"
desc: "Appends data to the specified resource to later be flushed (written) by a call to flush\n\n**Code Samples**\n\n```java\nFileRange range = new FileRange(1024, 2048L);\n DownloadRetryOptions options = new DownloadRetryOptions().setMaxRetryRequests(5);\n byte[] contentMd5 = new byte[0]; // Replace with valid md5\n\n Response<Void> response = client.appendWithResponse(binaryData, offset, contentMd5, leaseId, timeout,\n new Context(key1, value1));\n System.out.printf(\"Append data completed with status %d%n\", response.getStatusCode());\n```\n\nFor more information, see the [Azure Docs][]\n\n\n[Azure Docs]: https://docs.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/update"
returns:
description: "A response signalling completion."
type: "<xref href=\"com.azure.core.http.rest.Response?alt=com.azure.core.http.rest.Response&text=Response\" data-throw-if-not-resolved=\"False\" />&lt;<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html\">Void</a>&gt;"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.appendWithResponse(com.azure.core.util.BinaryData,long,com.azure.storage.file.datalake.options.DataLakeFileAppendOptions,java.time.Duration,com.azure.core.util.Context)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.appendWithResponse(BinaryData data, long fileOffset, DataLakeFileAppendOptions appendOptions, Duration timeout, Context context)"
name: "appendWithResponse(BinaryData data, long fileOffset, DataLakeFileAppendOptions appendOptions, Duration timeout, Context context)"
nameWithType: "DataLakeFileClient.appendWithResponse(BinaryData data, long fileOffset, DataLakeFileAppendOptions appendOptions, Duration timeout, Context context)"
summary: "Appends data to the specified resource to later be flushed (written) by a call to flush"
parameters:
- description: "The data to write to the file."
name: "data"
type: "<xref href=\"com.azure.core.util.BinaryData?alt=com.azure.core.util.BinaryData&text=BinaryData\" data-throw-if-not-resolved=\"False\" />"
- description: "The position where the data is to be appended."
name: "fileOffset"
type: "<xref href=\"long?alt=long&text=long\" data-throw-if-not-resolved=\"False\" />"
- description: "<xref uid=\"com.azure.storage.file.datalake.options.DataLakeFileAppendOptions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakeFileAppendOptions\"></xref>"
name: "appendOptions"
type: "<xref href=\"com.azure.storage.file.datalake.options.DataLakeFileAppendOptions?alt=com.azure.storage.file.datalake.options.DataLakeFileAppendOptions&text=DataLakeFileAppendOptions\" data-throw-if-not-resolved=\"False\" />"
- description: "An optional timeout value beyond which a <xref uid=\"\" data-throw-if-not-resolved=\"false\" data-raw-source=\"RuntimeException\"></xref> will be raised."
name: "timeout"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/time/Duration.html\">Duration</a>"
- description: "Additional context that is passed through the Http pipeline during the service call."
name: "context"
type: "<xref href=\"com.azure.core.util.Context?alt=com.azure.core.util.Context&text=Context\" data-throw-if-not-resolved=\"False\" />"
syntax: "public Response<Void> appendWithResponse(BinaryData data, long fileOffset, DataLakeFileAppendOptions appendOptions, Duration timeout, Context context)"
desc: "Appends data to the specified resource to later be flushed (written) by a call to flush\n\n**Code Samples**\n\n```java\nBinaryData binaryData = BinaryData.fromStream(data, length);\n FileRange range = new FileRange(1024, 2048L);\n byte[] contentMd5 = new byte[0]; // Replace with valid md5\n DataLakeFileAppendOptions appendOptions = new DataLakeFileAppendOptions()\n .setLeaseId(leaseId)\n .setContentHash(contentMd5)\n .setFlush(true);\n Response<Void> response = client.appendWithResponse(binaryData, offset, appendOptions, timeout,\n new Context(key1, value1));\n System.out.printf(\"Append data completed with status %d%n\", response.getStatusCode());\n```\n\nFor more information, see the [Azure Docs][]\n\n\n[Azure Docs]: https://docs.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/update"
returns:
description: "A response signalling completion."
type: "<xref href=\"com.azure.core.http.rest.Response?alt=com.azure.core.http.rest.Response&text=Response\" data-throw-if-not-resolved=\"False\" />&lt;<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html\">Void</a>&gt;"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.appendWithResponse(java.io.InputStream,long,long,byte[],java.lang.String,java.time.Duration,com.azure.core.util.Context)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.appendWithResponse(InputStream data, long fileOffset, long length, byte[] contentMd5, String leaseId, Duration timeout, Context context)"
name: "appendWithResponse(InputStream data, long fileOffset, long length, byte[] contentMd5, String leaseId, Duration timeout, Context context)"
nameWithType: "DataLakeFileClient.appendWithResponse(InputStream data, long fileOffset, long length, byte[] contentMd5, String leaseId, Duration timeout, Context context)"
summary: "Appends data to the specified resource to later be flushed (written) by a call to flush"
parameters:
- description: "The data to write to the file."
name: "data"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/io/InputStream.html\">InputStream</a>"
- description: "The position where the data is to be appended."
name: "fileOffset"
type: "<xref href=\"long?alt=long&text=long\" data-throw-if-not-resolved=\"False\" />"
- description: "The exact length of the data."
name: "length"
type: "<xref href=\"long?alt=long&text=long\" data-throw-if-not-resolved=\"False\" />"
- description: "An MD5 hash of the content of the data. If specified, the service will calculate the MD5 of the\n received data and fail the request if it does not match the provided MD5."
name: "contentMd5"
type: "<xref href=\"byte?alt=byte&text=byte\" data-throw-if-not-resolved=\"False\" />[]"
- description: "By setting lease id, requests will fail if the provided lease does not match the active lease on\n the file."
name: "leaseId"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/String.html\">String</a>"
- description: "An optional timeout value beyond which a <xref uid=\"\" data-throw-if-not-resolved=\"false\" data-raw-source=\"RuntimeException\"></xref> will be raised."
name: "timeout"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/time/Duration.html\">Duration</a>"
- description: "Additional context that is passed through the Http pipeline during the service call."
name: "context"
type: "<xref href=\"com.azure.core.util.Context?alt=com.azure.core.util.Context&text=Context\" data-throw-if-not-resolved=\"False\" />"
syntax: "public Response<Void> appendWithResponse(InputStream data, long fileOffset, long length, byte[] contentMd5, String leaseId, Duration timeout, Context context)"
desc: "Appends data to the specified resource to later be flushed (written) by a call to flush\n\n**Code Samples**\n\n```java\nFileRange range = new FileRange(1024, 2048L);\n DownloadRetryOptions options = new DownloadRetryOptions().setMaxRetryRequests(5);\n byte[] contentMd5 = new byte[0]; // Replace with valid md5\n\n Response<Void> response = client.appendWithResponse(data, offset, length, contentMd5, leaseId, timeout,\n new Context(key1, value1));\n System.out.printf(\"Append data completed with status %d%n\", response.getStatusCode());\n```\n\nFor more information, see the [Azure Docs][]\n\n\n[Azure Docs]: https://docs.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/update"
returns:
description: "A response signalling completion."
type: "<xref href=\"com.azure.core.http.rest.Response?alt=com.azure.core.http.rest.Response&text=Response\" data-throw-if-not-resolved=\"False\" />&lt;<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html\">Void</a>&gt;"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.appendWithResponse(java.io.InputStream,long,long,com.azure.storage.file.datalake.options.DataLakeFileAppendOptions,java.time.Duration,com.azure.core.util.Context)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.appendWithResponse(InputStream data, long fileOffset, long length, DataLakeFileAppendOptions appendOptions, Duration timeout, Context context)"
name: "appendWithResponse(InputStream data, long fileOffset, long length, DataLakeFileAppendOptions appendOptions, Duration timeout, Context context)"
nameWithType: "DataLakeFileClient.appendWithResponse(InputStream data, long fileOffset, long length, DataLakeFileAppendOptions appendOptions, Duration timeout, Context context)"
summary: "Appends data to the specified resource to later be flushed (written) by a call to flush"
parameters:
- description: "The data to write to the file."
name: "data"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/io/InputStream.html\">InputStream</a>"
- description: "The position where the data is to be appended."
name: "fileOffset"
type: "<xref href=\"long?alt=long&text=long\" data-throw-if-not-resolved=\"False\" />"
- description: "The exact length of the data."
name: "length"
type: "<xref href=\"long?alt=long&text=long\" data-throw-if-not-resolved=\"False\" />"
- description: "<xref uid=\"com.azure.storage.file.datalake.options.DataLakeFileAppendOptions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakeFileAppendOptions\"></xref>"
name: "appendOptions"
type: "<xref href=\"com.azure.storage.file.datalake.options.DataLakeFileAppendOptions?alt=com.azure.storage.file.datalake.options.DataLakeFileAppendOptions&text=DataLakeFileAppendOptions\" data-throw-if-not-resolved=\"False\" />"
- description: "An optional timeout value beyond which a <xref uid=\"\" data-throw-if-not-resolved=\"false\" data-raw-source=\"RuntimeException\"></xref> will be raised."
name: "timeout"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/time/Duration.html\">Duration</a>"
- description: "Additional context that is passed through the Http pipeline during the service call."
name: "context"
type: "<xref href=\"com.azure.core.util.Context?alt=com.azure.core.util.Context&text=Context\" data-throw-if-not-resolved=\"False\" />"
syntax: "public Response<Void> appendWithResponse(InputStream data, long fileOffset, long length, DataLakeFileAppendOptions appendOptions, Duration timeout, Context context)"
desc: "Appends data to the specified resource to later be flushed (written) by a call to flush\n\n**Code Samples**\n\n```java\nFileRange range = new FileRange(1024, 2048L);\n byte[] contentMd5 = new byte[0]; // Replace with valid md5\n DataLakeFileAppendOptions appendOptions = new DataLakeFileAppendOptions()\n .setLeaseId(leaseId)\n .setContentHash(contentMd5)\n .setFlush(true);\n Response<Void> response = client.appendWithResponse(data, offset, length, appendOptions, timeout,\n new Context(key1, value1));\n System.out.printf(\"Append data completed with status %d%n\", response.getStatusCode());\n```\n\nFor more information, see the [Azure Docs][]\n\n\n[Azure Docs]: https://docs.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/update"
returns:
description: "A response signalling completion."
type: "<xref href=\"com.azure.core.http.rest.Response?alt=com.azure.core.http.rest.Response&text=Response\" data-throw-if-not-resolved=\"False\" />&lt;<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html\">Void</a>&gt;"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.delete()"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.delete()"
name: "delete()"
nameWithType: "DataLakeFileClient.delete()"
summary: "Deletes a file."
syntax: "public void delete()"
desc: "Deletes a file.\n\n**Code Samples**\n\n```java\nclient.delete();\n System.out.println(\"Delete request completed\");\n```\n\nFor more information, see the [Azure Docs][]\n\n\n[Azure Docs]: https://docs.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/delete"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.deleteIfExists()"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.deleteIfExists()"
name: "deleteIfExists()"
nameWithType: "DataLakeFileClient.deleteIfExists()"
summary: "Deletes a file if it exists."
overridden: "com.azure.storage.file.datalake.DataLakePathClient.deleteIfExists()"
syntax: "public boolean deleteIfExists()"
desc: "Deletes a file if it exists.\n\n**Code Samples**\n\n```java\nclient.deleteIfExists();\n System.out.println(\"Delete request completed\");\n```\n\nFor more information, see the [Azure Docs][]\n\n\n[Azure Docs]: https://docs.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/delete"
returns:
description: "<code>true</code> if file is successfully deleted, <code>false</code> if the file does not exist."
type: "<xref href=\"boolean?alt=boolean&text=boolean\" data-throw-if-not-resolved=\"False\" />"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.deleteIfExistsWithResponse(com.azure.storage.file.datalake.options.DataLakePathDeleteOptions,java.time.Duration,com.azure.core.util.Context)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.deleteIfExistsWithResponse(DataLakePathDeleteOptions options, Duration timeout, Context context)"
name: "deleteIfExistsWithResponse(DataLakePathDeleteOptions options, Duration timeout, Context context)"
nameWithType: "DataLakeFileClient.deleteIfExistsWithResponse(DataLakePathDeleteOptions options, Duration timeout, Context context)"
summary: "Deletes a file if it exists."
overridden: "com.azure.storage.file.datalake.DataLakePathClient.deleteIfExistsWithResponse(com.azure.storage.file.datalake.options.DataLakePathDeleteOptions,java.time.Duration,com.azure.core.util.Context)"
parameters:
- description: "<xref uid=\"com.azure.storage.file.datalake.options.DataLakePathDeleteOptions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakePathDeleteOptions\"></xref>"
name: "options"
type: "<xref href=\"com.azure.storage.file.datalake.options.DataLakePathDeleteOptions?alt=com.azure.storage.file.datalake.options.DataLakePathDeleteOptions&text=DataLakePathDeleteOptions\" data-throw-if-not-resolved=\"False\" />"
- description: "An optional timeout value beyond which a <xref uid=\"\" data-throw-if-not-resolved=\"false\" data-raw-source=\"RuntimeException\"></xref> will be raised."
name: "timeout"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/time/Duration.html\">Duration</a>"
- description: "Additional context that is passed through the Http pipeline during the service call."
name: "context"
type: "<xref href=\"com.azure.core.util.Context?alt=com.azure.core.util.Context&text=Context\" data-throw-if-not-resolved=\"False\" />"
syntax: "public Response<Boolean> deleteIfExistsWithResponse(DataLakePathDeleteOptions options, Duration timeout, Context context)"
desc: "Deletes a file if it exists.\n\n**Code Samples**\n\n```java\nDataLakeRequestConditions requestConditions = new DataLakeRequestConditions()\n .setLeaseId(leaseId);\n DataLakePathDeleteOptions options = new DataLakePathDeleteOptions().setIsRecursive(false)\n .setRequestConditions(requestConditions);\n\n Response<Boolean> response = client.deleteIfExistsWithResponse(options, timeout, new Context(key1, value1));\n if (response.getStatusCode() == 404) {\n System.out.println(\"Does not exist.\");\n } else {\n System.out.printf(\"Delete completed with status %d%n\", response.getStatusCode());\n }\n```\n\nFor more information, see the [Azure Docs][]\n\n\n[Azure Docs]: https://docs.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/delete"
returns:
description: "A response containing status code and HTTP headers. If <xref uid=\"com.azure.core.http.rest.Response\" data-throw-if-not-resolved=\"false\" data-raw-source=\"Response\"></xref>'s status code is 200, the file\n was successfully deleted. If status code is 404, the file does not exist."
type: "<xref href=\"com.azure.core.http.rest.Response?alt=com.azure.core.http.rest.Response&text=Response\" data-throw-if-not-resolved=\"False\" />&lt;<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/Boolean.html\">Boolean</a>&gt;"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.deleteWithResponse(com.azure.storage.file.datalake.models.DataLakeRequestConditions,java.time.Duration,com.azure.core.util.Context)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.deleteWithResponse(DataLakeRequestConditions requestConditions, Duration timeout, Context context)"
name: "deleteWithResponse(DataLakeRequestConditions requestConditions, Duration timeout, Context context)"
nameWithType: "DataLakeFileClient.deleteWithResponse(DataLakeRequestConditions requestConditions, Duration timeout, Context context)"
summary: "Deletes a file."
parameters:
- description: "<xref uid=\"com.azure.storage.file.datalake.models.DataLakeRequestConditions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakeRequestConditions\"></xref>"
name: "requestConditions"
type: "<xref href=\"com.azure.storage.file.datalake.models.DataLakeRequestConditions?alt=com.azure.storage.file.datalake.models.DataLakeRequestConditions&text=DataLakeRequestConditions\" data-throw-if-not-resolved=\"False\" />"
- description: "An optional timeout value beyond which a <xref uid=\"\" data-throw-if-not-resolved=\"false\" data-raw-source=\"RuntimeException\"></xref> will be raised."
name: "timeout"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/time/Duration.html\">Duration</a>"
- description: "Additional context that is passed through the Http pipeline during the service call."
name: "context"
type: "<xref href=\"com.azure.core.util.Context?alt=com.azure.core.util.Context&text=Context\" data-throw-if-not-resolved=\"False\" />"
syntax: "public Response<Void> deleteWithResponse(DataLakeRequestConditions requestConditions, Duration timeout, Context context)"
desc: "Deletes a file.\n\n**Code Samples**\n\n```java\nDataLakeRequestConditions requestConditions = new DataLakeRequestConditions()\n .setLeaseId(leaseId);\n\n client.deleteWithResponse(requestConditions, timeout, new Context(key1, value1));\n System.out.println(\"Delete request completed\");\n```\n\nFor more information, see the [Azure Docs][]\n\n\n[Azure Docs]: https://docs.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/delete"
returns:
description: "A response containing status code and HTTP headers."
type: "<xref href=\"com.azure.core.http.rest.Response?alt=com.azure.core.http.rest.Response&text=Response\" data-throw-if-not-resolved=\"False\" />&lt;<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html\">Void</a>&gt;"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.flush(long)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.flush(long position)"
name: "flush(long position)"
nameWithType: "DataLakeFileClient.flush(long position)"
summary: "Flushes (writes) data previously appended to the file through a call to append."
deprecatedTag: "See <xref uid=\"com.azure.storage.file.datalake.DataLakeFileClient.flush(long,boolean)\" data-throw-if-not-resolved=\"false\" data-raw-source=\"#flush(long, boolean)\"></xref> instead."
parameters:
- description: "The length of the file after all data has been written."
name: "position"
type: "<xref href=\"long?alt=long&text=long\" data-throw-if-not-resolved=\"False\" />"
syntax: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/Deprecated.html\">@Deprecated</a></br>public PathInfo flush(long position)"
desc: "Flushes (writes) data previously appended to the file through a call to append. The previously uploaded data must be contiguous.\n\nBy default this method will not overwrite existing data.\n\n**Code Samples**\n\n```java\nclient.flush(position);\n System.out.println(\"Flush data completed\");\n```\n\nFor more information, see the [Azure Docs][]\n\n\n[Azure Docs]: https://docs.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/update"
hasDeprecatedTag: true
returns:
description: "Information about the created resource."
type: "<xref href=\"com.azure.storage.file.datalake.models.PathInfo?alt=com.azure.storage.file.datalake.models.PathInfo&text=PathInfo\" data-throw-if-not-resolved=\"False\" />"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.flush(long,boolean)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.flush(long position, boolean overwrite)"
name: "flush(long position, boolean overwrite)"
nameWithType: "DataLakeFileClient.flush(long position, boolean overwrite)"
summary: "Flushes (writes) data previously appended to the file through a call to append."
parameters:
- description: "The length of the file after all data has been written."
name: "position"
type: "<xref href=\"long?alt=long&text=long\" data-throw-if-not-resolved=\"False\" />"
- description: "Whether to overwrite, should data exist on the file."
name: "overwrite"
type: "<xref href=\"boolean?alt=boolean&text=boolean\" data-throw-if-not-resolved=\"False\" />"
syntax: "public PathInfo flush(long position, boolean overwrite)"
desc: "Flushes (writes) data previously appended to the file through a call to append. The previously uploaded data must be contiguous.\n\n**Code Samples**\n\n```java\nboolean overwrite = true;\n client.flush(position, overwrite);\n System.out.println(\"Flush data completed\");\n```\n\nFor more information, see the [Azure Docs][]\n\n\n[Azure Docs]: https://docs.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/update"
returns:
description: "Information about the created resource."
type: "<xref href=\"com.azure.storage.file.datalake.models.PathInfo?alt=com.azure.storage.file.datalake.models.PathInfo&text=PathInfo\" data-throw-if-not-resolved=\"False\" />"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.flushWithResponse(long,boolean,boolean,com.azure.storage.file.datalake.models.PathHttpHeaders,com.azure.storage.file.datalake.models.DataLakeRequestConditions,java.time.Duration,com.azure.core.util.Context)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.flushWithResponse(long position, boolean retainUncommittedData, boolean close, PathHttpHeaders httpHeaders, DataLakeRequestConditions requestConditions, Duration timeout, Context context)"
name: "flushWithResponse(long position, boolean retainUncommittedData, boolean close, PathHttpHeaders httpHeaders, DataLakeRequestConditions requestConditions, Duration timeout, Context context)"
nameWithType: "DataLakeFileClient.flushWithResponse(long position, boolean retainUncommittedData, boolean close, PathHttpHeaders httpHeaders, DataLakeRequestConditions requestConditions, Duration timeout, Context context)"
summary: "Flushes (writes) data previously appended to the file through a call to append."
parameters:
- description: "The length of the file after all data has been written."
name: "position"
type: "<xref href=\"long?alt=long&text=long\" data-throw-if-not-resolved=\"False\" />"
- description: "Whether uncommitted data is to be retained after the operation."
name: "retainUncommittedData"
type: "<xref href=\"boolean?alt=boolean&text=boolean\" data-throw-if-not-resolved=\"False\" />"
- description: "Whether a file changed event raised indicates completion (true) or modification (false)."
name: "close"
type: "<xref href=\"boolean?alt=boolean&text=boolean\" data-throw-if-not-resolved=\"False\" />"
- description: "<xref uid=\"com.azure.storage.file.datalake.models.PathHttpHeaders\" data-throw-if-not-resolved=\"false\" data-raw-source=\"httpHeaders\"></xref>"
name: "httpHeaders"
type: "<xref href=\"com.azure.storage.file.datalake.models.PathHttpHeaders?alt=com.azure.storage.file.datalake.models.PathHttpHeaders&text=PathHttpHeaders\" data-throw-if-not-resolved=\"False\" />"
- description: "<xref uid=\"com.azure.storage.file.datalake.models.DataLakeRequestConditions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"requestConditions\"></xref>"
name: "requestConditions"
type: "<xref href=\"com.azure.storage.file.datalake.models.DataLakeRequestConditions?alt=com.azure.storage.file.datalake.models.DataLakeRequestConditions&text=DataLakeRequestConditions\" data-throw-if-not-resolved=\"False\" />"
- description: "An optional timeout value beyond which a <xref uid=\"\" data-throw-if-not-resolved=\"false\" data-raw-source=\"RuntimeException\"></xref> will be raised."
name: "timeout"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/time/Duration.html\">Duration</a>"
- description: "Additional context that is passed through the Http pipeline during the service call."
name: "context"
type: "<xref href=\"com.azure.core.util.Context?alt=com.azure.core.util.Context&text=Context\" data-throw-if-not-resolved=\"False\" />"
syntax: "public Response<PathInfo> flushWithResponse(long position, boolean retainUncommittedData, boolean close, PathHttpHeaders httpHeaders, DataLakeRequestConditions requestConditions, Duration timeout, Context context)"
desc: "Flushes (writes) data previously appended to the file through a call to append. The previously uploaded data must be contiguous.\n\n**Code Samples**\n\n```java\nFileRange range = new FileRange(1024, 2048L);\n DownloadRetryOptions options = new DownloadRetryOptions().setMaxRetryRequests(5);\n byte[] contentMd5 = new byte[0]; // Replace with valid md5\n boolean retainUncommittedData = false;\n boolean close = false;\n PathHttpHeaders httpHeaders = new PathHttpHeaders()\n .setContentLanguage(\"en-US\")\n .setContentType(\"binary\");\n DataLakeRequestConditions requestConditions = new DataLakeRequestConditions()\n .setLeaseId(leaseId);\n\n Response<PathInfo> response = client.flushWithResponse(position, retainUncommittedData, close, httpHeaders,\n requestConditions, timeout, new Context(key1, value1));\n System.out.printf(\"Flush data completed with status %d%n\", response.getStatusCode());\n```\n\nFor more information, see the [Azure Docs][]\n\n\n[Azure Docs]: https://docs.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/update"
returns:
description: "A response containing the information of the created resource."
type: "<xref href=\"com.azure.core.http.rest.Response?alt=com.azure.core.http.rest.Response&text=Response\" data-throw-if-not-resolved=\"False\" />&lt;<xref href=\"com.azure.storage.file.datalake.models.PathInfo?alt=com.azure.storage.file.datalake.models.PathInfo&text=PathInfo\" data-throw-if-not-resolved=\"False\" />&gt;"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.flushWithResponse(long,com.azure.storage.file.datalake.options.DataLakeFileFlushOptions,java.time.Duration,com.azure.core.util.Context)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.flushWithResponse(long position, DataLakeFileFlushOptions flushOptions, Duration timeout, Context context)"
name: "flushWithResponse(long position, DataLakeFileFlushOptions flushOptions, Duration timeout, Context context)"
nameWithType: "DataLakeFileClient.flushWithResponse(long position, DataLakeFileFlushOptions flushOptions, Duration timeout, Context context)"
summary: "Flushes (writes) data previously appended to the file through a call to append."
parameters:
- description: "The length of the file after all data has been written."
name: "position"
type: "<xref href=\"long?alt=long&text=long\" data-throw-if-not-resolved=\"False\" />"
- description: "<xref uid=\"com.azure.storage.file.datalake.options.DataLakeFileFlushOptions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakeFileFlushOptions\"></xref>"
name: "flushOptions"
type: "<xref href=\"com.azure.storage.file.datalake.options.DataLakeFileFlushOptions?alt=com.azure.storage.file.datalake.options.DataLakeFileFlushOptions&text=DataLakeFileFlushOptions\" data-throw-if-not-resolved=\"False\" />"
- description: "An optional timeout value beyond which a <xref uid=\"\" data-throw-if-not-resolved=\"false\" data-raw-source=\"RuntimeException\"></xref> will be raised."
name: "timeout"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/time/Duration.html\">Duration</a>"
- description: "Additional context that is passed through the Http pipeline during the service call."
name: "context"
type: "<xref href=\"com.azure.core.util.Context?alt=com.azure.core.util.Context&text=Context\" data-throw-if-not-resolved=\"False\" />"
syntax: "public Response<PathInfo> flushWithResponse(long position, DataLakeFileFlushOptions flushOptions, Duration timeout, Context context)"
desc: "Flushes (writes) data previously appended to the file through a call to append. The previously uploaded data must be contiguous.\n\n**Code Samples**\n\n```java\nFileRange range = new FileRange(1024, 2048L);\n DownloadRetryOptions options = new DownloadRetryOptions().setMaxRetryRequests(5);\n byte[] contentMd5 = new byte[0]; // Replace with valid md5\n boolean retainUncommittedData = false;\n boolean close = false;\n PathHttpHeaders httpHeaders = new PathHttpHeaders()\n .setContentLanguage(\"en-US\")\n .setContentType(\"binary\");\n DataLakeRequestConditions requestConditions = new DataLakeRequestConditions()\n .setLeaseId(leaseId);\n\n Integer leaseDuration = 15;\n\n DataLakeFileFlushOptions flushOptions = new DataLakeFileFlushOptions()\n .setUncommittedDataRetained(retainUncommittedData)\n .setClose(close)\n .setPathHttpHeaders(httpHeaders)\n .setRequestConditions(requestConditions)\n .setLeaseAction(LeaseAction.ACQUIRE)\n .setLeaseDuration(leaseDuration)\n .setProposedLeaseId(leaseId);\n\n Response<PathInfo> response = client.flushWithResponse(position, flushOptions, timeout,\n new Context(key1, value1));\n System.out.printf(\"Flush data completed with status %d%n\", response.getStatusCode());\n```\n\nFor more information, see the [Azure Docs][]\n\n\n[Azure Docs]: https://docs.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/update"
returns:
description: "A response containing the information of the created resource."
type: "<xref href=\"com.azure.core.http.rest.Response?alt=com.azure.core.http.rest.Response&text=Response\" data-throw-if-not-resolved=\"False\" />&lt;<xref href=\"com.azure.storage.file.datalake.models.PathInfo?alt=com.azure.storage.file.datalake.models.PathInfo&text=PathInfo\" data-throw-if-not-resolved=\"False\" />&gt;"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.getCustomerProvidedKeyClient(com.azure.storage.file.datalake.models.CustomerProvidedKey)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.getCustomerProvidedKeyClient(CustomerProvidedKey customerProvidedKey)"
name: "getCustomerProvidedKeyClient(CustomerProvidedKey customerProvidedKey)"
nameWithType: "DataLakeFileClient.getCustomerProvidedKeyClient(CustomerProvidedKey customerProvidedKey)"
summary: "Creates a new <xref uid=\"com.azure.storage.file.datalake.DataLakeFileClient\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakeFileClient\"></xref> with the specified `customerProvidedKey`."
overridden: "com.azure.storage.file.datalake.DataLakePathClient.getCustomerProvidedKeyClient(com.azure.storage.file.datalake.models.CustomerProvidedKey)"
parameters:
- description: "the <xref uid=\"com.azure.storage.file.datalake.models.CustomerProvidedKey\" data-throw-if-not-resolved=\"false\" data-raw-source=\"CustomerProvidedKey\"></xref> for the blob,\n pass <code>null</code> to use no customer provided key."
name: "customerProvidedKey"
type: "<xref href=\"com.azure.storage.file.datalake.models.CustomerProvidedKey?alt=com.azure.storage.file.datalake.models.CustomerProvidedKey&text=CustomerProvidedKey\" data-throw-if-not-resolved=\"False\" />"
syntax: "public DataLakeFileClient getCustomerProvidedKeyClient(CustomerProvidedKey customerProvidedKey)"
desc: "Creates a new <xref uid=\"com.azure.storage.file.datalake.DataLakeFileClient\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakeFileClient\"></xref> with the specified `customerProvidedKey`."
returns:
description: "a <xref uid=\"com.azure.storage.file.datalake.DataLakeFileClient\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakeFileClient\"></xref> with the specified <code>customerProvidedKey</code>."
type: "<xref href=\"com.azure.storage.file.datalake.DataLakeFileClient?alt=com.azure.storage.file.datalake.DataLakeFileClient&text=DataLakeFileClient\" data-throw-if-not-resolved=\"False\" />"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.getFileName()"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.getFileName()"
name: "getFileName()"
nameWithType: "DataLakeFileClient.getFileName()"
summary: "Gets the name of this file, not including its full path."
syntax: "public String getFileName()"
desc: "Gets the name of this file, not including its full path."
returns:
description: "The name of the file."
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/String.html\">String</a>"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.getFilePath()"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.getFilePath()"
name: "getFilePath()"
nameWithType: "DataLakeFileClient.getFilePath()"
summary: "Gets the path of this file, not including the name of the resource itself."
syntax: "public String getFilePath()"
desc: "Gets the path of this file, not including the name of the resource itself."
returns:
description: "The path of the file."
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/String.html\">String</a>"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.getFileUrl()"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.getFileUrl()"
name: "getFileUrl()"
nameWithType: "DataLakeFileClient.getFileUrl()"
summary: "Gets the URL of the file represented by this client on the Data Lake service."
syntax: "public String getFileUrl()"
desc: "Gets the URL of the file represented by this client on the Data Lake service."
returns:
description: "the URL."
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/String.html\">String</a>"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.getOutputStream()"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.getOutputStream()"
name: "getOutputStream()"
nameWithType: "DataLakeFileClient.getOutputStream()"
summary: "Creates and opens an output stream to write data to the file."
syntax: "public OutputStream getOutputStream()"
desc: "Creates and opens an output stream to write data to the file. If the file already exists on the service, it will be overwritten."
returns:
description: "The <xref uid=\"java.io.OutputStream\" data-throw-if-not-resolved=\"false\" data-raw-source=\"OutputStream\"></xref> that can be used to write to the file."
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html\">OutputStream</a>"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.getOutputStream(com.azure.storage.file.datalake.options.DataLakeFileOutputStreamOptions)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.getOutputStream(DataLakeFileOutputStreamOptions options)"
name: "getOutputStream(DataLakeFileOutputStreamOptions options)"
nameWithType: "DataLakeFileClient.getOutputStream(DataLakeFileOutputStreamOptions options)"
summary: "Creates and opens an output stream to write data to the file."
parameters:
- description: "<xref uid=\"com.azure.storage.file.datalake.options.DataLakeFileOutputStreamOptions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakeFileOutputStreamOptions\"></xref>"
name: "options"
type: "<xref href=\"com.azure.storage.file.datalake.options.DataLakeFileOutputStreamOptions?alt=com.azure.storage.file.datalake.options.DataLakeFileOutputStreamOptions&text=DataLakeFileOutputStreamOptions\" data-throw-if-not-resolved=\"False\" />"
syntax: "public OutputStream getOutputStream(DataLakeFileOutputStreamOptions options)"
desc: "Creates and opens an output stream to write data to the file. If the file already exists on the service, it will be overwritten.\n\nTo avoid overwriting, pass \"\\*\" to <xref uid=\"com.azure.storage.file.datalake.models.DataLakeRequestConditions.setIfNoneMatch(java.lang.String)\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakeRequestConditions#setIfNoneMatch(String)\"></xref>."
returns:
description: "The <xref uid=\"java.io.OutputStream\" data-throw-if-not-resolved=\"false\" data-raw-source=\"OutputStream\"></xref> that can be used to write to the file."
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html\">OutputStream</a>"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.getOutputStream(com.azure.storage.file.datalake.options.DataLakeFileOutputStreamOptions,com.azure.core.util.Context)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.getOutputStream(DataLakeFileOutputStreamOptions options, Context context)"
name: "getOutputStream(DataLakeFileOutputStreamOptions options, Context context)"
nameWithType: "DataLakeFileClient.getOutputStream(DataLakeFileOutputStreamOptions options, Context context)"
summary: "Creates and opens an output stream to write data to the file."
parameters:
- description: "<xref uid=\"com.azure.storage.file.datalake.options.DataLakeFileOutputStreamOptions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakeFileOutputStreamOptions\"></xref>"
name: "options"
type: "<xref href=\"com.azure.storage.file.datalake.options.DataLakeFileOutputStreamOptions?alt=com.azure.storage.file.datalake.options.DataLakeFileOutputStreamOptions&text=DataLakeFileOutputStreamOptions\" data-throw-if-not-resolved=\"False\" />"
- description: "Additional context that is passed through the Http pipeline during the service call."
name: "context"
type: "<xref href=\"com.azure.core.util.Context?alt=com.azure.core.util.Context&text=Context\" data-throw-if-not-resolved=\"False\" />"
syntax: "public OutputStream getOutputStream(DataLakeFileOutputStreamOptions options, Context context)"
desc: "Creates and opens an output stream to write data to the file. If the file already exists on the service, it will be overwritten.\n\nTo avoid overwriting, pass \"\\*\" to <xref uid=\"com.azure.storage.file.datalake.models.DataLakeRequestConditions.setIfNoneMatch(java.lang.String)\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakeRequestConditions#setIfNoneMatch(String)\"></xref>."
returns:
description: "The <xref uid=\"java.io.OutputStream\" data-throw-if-not-resolved=\"false\" data-raw-source=\"OutputStream\"></xref> that can be used to write to the file."
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html\">OutputStream</a>"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.openInputStream()"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.openInputStream()"
name: "openInputStream()"
nameWithType: "DataLakeFileClient.openInputStream()"
summary: "Opens a file input stream to download the file."
syntax: "public DataLakeFileOpenInputStreamResult openInputStream()"
desc: "Opens a file input stream to download the file. Locks on ETags.\n\n```java\nDataLakeFileOpenInputStreamResult inputStream = client.openInputStream();\n```"
returns:
description: "An <xref uid=\"java.io.InputStream\" data-throw-if-not-resolved=\"false\" data-raw-source=\"InputStream\"></xref> object that represents the stream to use for reading from the file."
type: "<xref href=\"com.azure.storage.file.datalake.models.DataLakeFileOpenInputStreamResult?alt=com.azure.storage.file.datalake.models.DataLakeFileOpenInputStreamResult&text=DataLakeFileOpenInputStreamResult\" data-throw-if-not-resolved=\"False\" />"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.openInputStream(com.azure.storage.file.datalake.options.DataLakeFileInputStreamOptions)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.openInputStream(DataLakeFileInputStreamOptions options)"
name: "openInputStream(DataLakeFileInputStreamOptions options)"
nameWithType: "DataLakeFileClient.openInputStream(DataLakeFileInputStreamOptions options)"
summary: "Opens a file input stream to download the specified range of the file."
parameters:
- description: "<xref uid=\"com.azure.storage.file.datalake.options.DataLakeFileInputStreamOptions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakeFileInputStreamOptions\"></xref>"
name: "options"
type: "<xref href=\"com.azure.storage.file.datalake.options.DataLakeFileInputStreamOptions?alt=com.azure.storage.file.datalake.options.DataLakeFileInputStreamOptions&text=DataLakeFileInputStreamOptions\" data-throw-if-not-resolved=\"False\" />"
syntax: "public DataLakeFileOpenInputStreamResult openInputStream(DataLakeFileInputStreamOptions options)"
desc: "Opens a file input stream to download the specified range of the file. Defaults to ETag locking if the option is not specified.\n\n```java\nDataLakeFileInputStreamOptions options = new DataLakeFileInputStreamOptions().setBlockSize(1024)\n .setRequestConditions(new DataLakeRequestConditions());\n DataLakeFileOpenInputStreamResult streamResult = client.openInputStream(options);\n```"
returns:
description: "A <xref uid=\"com.azure.storage.file.datalake.models.DataLakeFileOpenInputStreamResult\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakeFileOpenInputStreamResult\"></xref> object that contains the stream to use for reading from the file."
type: "<xref href=\"com.azure.storage.file.datalake.models.DataLakeFileOpenInputStreamResult?alt=com.azure.storage.file.datalake.models.DataLakeFileOpenInputStreamResult&text=DataLakeFileOpenInputStreamResult\" data-throw-if-not-resolved=\"False\" />"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.openInputStream(com.azure.storage.file.datalake.options.DataLakeFileInputStreamOptions,com.azure.core.util.Context)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.openInputStream(DataLakeFileInputStreamOptions options, Context context)"
name: "openInputStream(DataLakeFileInputStreamOptions options, Context context)"
nameWithType: "DataLakeFileClient.openInputStream(DataLakeFileInputStreamOptions options, Context context)"
summary: "Opens a file input stream to download the specified range of the file."
parameters:
- description: "<xref uid=\"com.azure.storage.file.datalake.options.DataLakeFileInputStreamOptions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakeFileInputStreamOptions\"></xref>"
name: "options"
type: "<xref href=\"com.azure.storage.file.datalake.options.DataLakeFileInputStreamOptions?alt=com.azure.storage.file.datalake.options.DataLakeFileInputStreamOptions&text=DataLakeFileInputStreamOptions\" data-throw-if-not-resolved=\"False\" />"
- description: "Additional context that is passed through the Http pipeline during the service call."
name: "context"
type: "<xref href=\"com.azure.core.util.Context?alt=com.azure.core.util.Context&text=Context\" data-throw-if-not-resolved=\"False\" />"
syntax: "public DataLakeFileOpenInputStreamResult openInputStream(DataLakeFileInputStreamOptions options, Context context)"
desc: "Opens a file input stream to download the specified range of the file. Defaults to ETag locking if the option is not specified.\n\n```java\noptions = new DataLakeFileInputStreamOptions().setBlockSize(1024)\n .setRequestConditions(new DataLakeRequestConditions());\n DataLakeFileOpenInputStreamResult stream = client.openInputStream(options, new Context(key1, value1));\n```"
returns:
description: "A <xref uid=\"com.azure.storage.file.datalake.models.DataLakeFileOpenInputStreamResult\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakeFileOpenInputStreamResult\"></xref> object that contains the stream to use for reading from the file."
type: "<xref href=\"com.azure.storage.file.datalake.models.DataLakeFileOpenInputStreamResult?alt=com.azure.storage.file.datalake.models.DataLakeFileOpenInputStreamResult&text=DataLakeFileOpenInputStreamResult\" data-throw-if-not-resolved=\"False\" />"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.openQueryInputStream(java.lang.String)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.openQueryInputStream(String expression)"
name: "openQueryInputStream(String expression)"
nameWithType: "DataLakeFileClient.openQueryInputStream(String expression)"
summary: "Opens an input stream to query the file."
parameters:
- description: "The query expression."
name: "expression"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/String.html\">String</a>"
syntax: "public InputStream openQueryInputStream(String expression)"
desc: "Opens an input stream to query the file.\n\nFor more information, see the [Azure Docs][]\n\n**Code Samples**\n\n```java\nString expression = \"SELECT * from BlobStorage\";\n InputStream inputStream = client.openQueryInputStream(expression);\n // Now you can read from the input stream like you would normally.\n```\n\n\n[Azure Docs]: https://docs.microsoft.com/rest/api/storageservices/query-blob-contents"
returns:
description: "An <code>InputStream</code> object that represents the stream to use for reading the query response."
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/io/InputStream.html\">InputStream</a>"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.openQueryInputStreamWithResponse(com.azure.storage.file.datalake.options.FileQueryOptions)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.openQueryInputStreamWithResponse(FileQueryOptions queryOptions)"
name: "openQueryInputStreamWithResponse(FileQueryOptions queryOptions)"
nameWithType: "DataLakeFileClient.openQueryInputStreamWithResponse(FileQueryOptions queryOptions)"
summary: "Opens an input stream to query the file."
parameters:
- description: "<xref uid=\"com.azure.storage.file.datalake.options.FileQueryOptions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"The query options\"></xref>."
name: "queryOptions"
type: "<xref href=\"com.azure.storage.file.datalake.options.FileQueryOptions?alt=com.azure.storage.file.datalake.options.FileQueryOptions&text=FileQueryOptions\" data-throw-if-not-resolved=\"False\" />"
syntax: "public Response<InputStream> openQueryInputStreamWithResponse(FileQueryOptions queryOptions)"
desc: "Opens an input stream to query the file.\n\nFor more information, see the [Azure Docs][]\n\n**Code Samples**\n\n```java\nString expression = \"SELECT * from BlobStorage\";\n FileQuerySerialization input = new FileQueryDelimitedSerialization()\n .setColumnSeparator(',')\n .setEscapeChar('\\n')\n .setRecordSeparator('\\n')\n .setHeadersPresent(true)\n .setFieldQuote('\"');\n FileQuerySerialization output = new FileQueryJsonSerialization()\n .setRecordSeparator('\\n');\n DataLakeRequestConditions requestConditions = new DataLakeRequestConditions()\n .setLeaseId(\"leaseId\");\n Consumer<FileQueryError> errorConsumer = System.out::println;\n Consumer<FileQueryProgress> progressConsumer = progress -> System.out.println(\"total file bytes read: \"\n + progress.getBytesScanned());\n FileQueryOptions queryOptions = new FileQueryOptions(expression)\n .setInputSerialization(input)\n .setOutputSerialization(output)\n .setRequestConditions(requestConditions)\n .setErrorConsumer(errorConsumer)\n .setProgressConsumer(progressConsumer);\n\n InputStream inputStream = client.openQueryInputStreamWithResponse(queryOptions).getValue();\n // Now you can read from the input stream like you would normally.\n```\n\n\n[Azure Docs]: https://docs.microsoft.com/rest/api/storageservices/query-blob-contents"
returns:
description: "A response containing status code and HTTP headers including an <code>InputStream</code> object\n that represents the stream to use for reading the query response."
type: "<xref href=\"com.azure.core.http.rest.Response?alt=com.azure.core.http.rest.Response&text=Response\" data-throw-if-not-resolved=\"False\" />&lt;<a href=\"https://docs.oracle.com/javase/8/docs/api/java/io/InputStream.html\">InputStream</a>&gt;"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.query(java.io.OutputStream,java.lang.String)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.query(OutputStream stream, String expression)"
name: "query(OutputStream stream, String expression)"
nameWithType: "DataLakeFileClient.query(OutputStream stream, String expression)"
summary: "Queries an entire file into an output stream."
parameters:
- description: "A non-null <xref uid=\"java.io.OutputStream\" data-throw-if-not-resolved=\"false\" data-raw-source=\"OutputStream\"></xref> instance where the downloaded data will be written."
name: "stream"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html\">OutputStream</a>"
- description: "The query expression."
name: "expression"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/String.html\">String</a>"
syntax: "public void query(OutputStream stream, String expression)"
desc: "Queries an entire file into an output stream.\n\nFor more information, see the [Azure Docs][]\n\n**Code Samples**\n\n```java\nByteArrayOutputStream queryData = new ByteArrayOutputStream();\n String expression = \"SELECT * from BlobStorage\";\n client.query(queryData, expression);\n System.out.println(\"Query completed.\");\n```\n\n\n[Azure Docs]: https://docs.microsoft.com/rest/api/storageservices/query-blob-contents"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.queryWithResponse(com.azure.storage.file.datalake.options.FileQueryOptions,java.time.Duration,com.azure.core.util.Context)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.queryWithResponse(FileQueryOptions queryOptions, Duration timeout, Context context)"
name: "queryWithResponse(FileQueryOptions queryOptions, Duration timeout, Context context)"
nameWithType: "DataLakeFileClient.queryWithResponse(FileQueryOptions queryOptions, Duration timeout, Context context)"
summary: "Queries an entire file into an output stream."
parameters:
- description: "<xref uid=\"com.azure.storage.file.datalake.options.FileQueryOptions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"The query options\"></xref>."
name: "queryOptions"
type: "<xref href=\"com.azure.storage.file.datalake.options.FileQueryOptions?alt=com.azure.storage.file.datalake.options.FileQueryOptions&text=FileQueryOptions\" data-throw-if-not-resolved=\"False\" />"
- description: "An optional timeout value beyond which a <xref uid=\"\" data-throw-if-not-resolved=\"false\" data-raw-source=\"RuntimeException\"></xref> will be raised."
name: "timeout"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/time/Duration.html\">Duration</a>"
- description: "Additional context that is passed through the Http pipeline during the service call."
name: "context"
type: "<xref href=\"com.azure.core.util.Context?alt=com.azure.core.util.Context&text=Context\" data-throw-if-not-resolved=\"False\" />"
syntax: "public FileQueryResponse queryWithResponse(FileQueryOptions queryOptions, Duration timeout, Context context)"
desc: "Queries an entire file into an output stream.\n\nFor more information, see the [Azure Docs][]\n\n**Code Samples**\n\n```java\nByteArrayOutputStream queryData = new ByteArrayOutputStream();\n String expression = \"SELECT * from BlobStorage\";\n FileQueryJsonSerialization input = new FileQueryJsonSerialization()\n .setRecordSeparator('\\n');\n FileQueryDelimitedSerialization output = new FileQueryDelimitedSerialization()\n .setEscapeChar('\\0')\n .setColumnSeparator(',')\n .setRecordSeparator('\\n')\n .setFieldQuote('\\'')\n .setHeadersPresent(true);\n DataLakeRequestConditions requestConditions = new DataLakeRequestConditions().setLeaseId(leaseId);\n Consumer<FileQueryError> errorConsumer = System.out::println;\n Consumer<FileQueryProgress> progressConsumer = progress -> System.out.println(\"total file bytes read: \"\n + progress.getBytesScanned());\n FileQueryOptions queryOptions = new FileQueryOptions(expression, queryData)\n .setInputSerialization(input)\n .setOutputSerialization(output)\n .setRequestConditions(requestConditions)\n .setErrorConsumer(errorConsumer)\n .setProgressConsumer(progressConsumer);\n System.out.printf(\"Query completed with status %d%n\",\n client.queryWithResponse(queryOptions, timeout, new Context(key1, value1))\n .getStatusCode());\n```\n\n\n[Azure Docs]: https://docs.microsoft.com/rest/api/storageservices/query-blob-contents"
returns:
description: "A response containing status code and HTTP headers."
type: "<xref href=\"com.azure.storage.file.datalake.models.FileQueryResponse?alt=com.azure.storage.file.datalake.models.FileQueryResponse&text=FileQueryResponse\" data-throw-if-not-resolved=\"False\" />"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.read(java.io.OutputStream)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.read(OutputStream stream)"
name: "read(OutputStream stream)"
nameWithType: "DataLakeFileClient.read(OutputStream stream)"
summary: "Reads the entire file into an output stream."
parameters:
- description: "A non-null <xref uid=\"java.io.OutputStream\" data-throw-if-not-resolved=\"false\" data-raw-source=\"OutputStream\"></xref> instance where the downloaded data will be written."
name: "stream"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html\">OutputStream</a>"
syntax: "public void read(OutputStream stream)"
desc: "Reads the entire file into an output stream.\n\n**Code Samples**\n\n```java\nclient.read(new ByteArrayOutputStream());\n System.out.println(\"Download completed.\");\n```\n\nFor more information, see the [Azure Docs][]\n\n\n[Azure Docs]: https://docs.microsoft.com/rest/api/storageservices/get-blob"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.readToFile(com.azure.storage.file.datalake.options.ReadToFileOptions)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.readToFile(ReadToFileOptions options)"
name: "readToFile(ReadToFileOptions options)"
nameWithType: "DataLakeFileClient.readToFile(ReadToFileOptions options)"
summary: "Reads the entire file into a file specified by the path."
parameters:
- description: "<xref uid=\"com.azure.storage.file.datalake.options.ReadToFileOptions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"ReadToFileOptions\"></xref>"
name: "options"
type: "<xref href=\"com.azure.storage.file.datalake.options.ReadToFileOptions?alt=com.azure.storage.file.datalake.options.ReadToFileOptions&text=ReadToFileOptions\" data-throw-if-not-resolved=\"False\" />"
syntax: "public PathProperties readToFile(ReadToFileOptions options)"
desc: "Reads the entire file into a file specified by the path.\n\nThe file will be created and must not exist; if the file already exists, a <xref uid=\"\" data-throw-if-not-resolved=\"false\" data-raw-source=\"FileAlreadyExistsException\"></xref> will be thrown.\n\n**Code Samples**\n\n```java\nclient.readToFile(new ReadToFileOptions(file));\n System.out.println(\"Completed download to file\");\n```\n\nFor more information, see the [Azure Docs][]\n\n\n[Azure Docs]: https://docs.microsoft.com/rest/api/storageservices/get-blob"
returns:
description: "The file properties and metadata."
type: "<xref href=\"com.azure.storage.file.datalake.models.PathProperties?alt=com.azure.storage.file.datalake.models.PathProperties&text=PathProperties\" data-throw-if-not-resolved=\"False\" />"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.readToFile(java.lang.String)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.readToFile(String filePath)"
name: "readToFile(String filePath)"
nameWithType: "DataLakeFileClient.readToFile(String filePath)"
summary: "Reads the entire file into a file specified by the path."
parameters:
- description: "A <xref uid=\"java.lang.String\" data-throw-if-not-resolved=\"false\" data-raw-source=\"String\"></xref> representing the filePath where the downloaded data will be written."
name: "filePath"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/String.html\">String</a>"
syntax: "public PathProperties readToFile(String filePath)"
desc: "Reads the entire file into a file specified by the path.\n\nThe file will be created and must not exist; if the file already exists, a <xref uid=\"\" data-throw-if-not-resolved=\"false\" data-raw-source=\"FileAlreadyExistsException\"></xref> will be thrown.\n\n**Code Samples**\n\n```java\nclient.readToFile(file);\n System.out.println(\"Completed download to file\");\n```\n\nFor more information, see the [Azure Docs][]\n\n\n[Azure Docs]: https://docs.microsoft.com/rest/api/storageservices/get-blob"
returns:
description: "The file properties and metadata."
type: "<xref href=\"com.azure.storage.file.datalake.models.PathProperties?alt=com.azure.storage.file.datalake.models.PathProperties&text=PathProperties\" data-throw-if-not-resolved=\"False\" />"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.readToFile(java.lang.String,boolean)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.readToFile(String filePath, boolean overwrite)"
name: "readToFile(String filePath, boolean overwrite)"
nameWithType: "DataLakeFileClient.readToFile(String filePath, boolean overwrite)"
summary: "Reads the entire file into a file specified by the path."
parameters:
- description: "A <xref uid=\"java.lang.String\" data-throw-if-not-resolved=\"false\" data-raw-source=\"String\"></xref> representing the filePath where the downloaded data will be written."
name: "filePath"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/String.html\">String</a>"
- description: "Whether to overwrite the file, should the file exist."
name: "overwrite"
type: "<xref href=\"boolean?alt=boolean&text=boolean\" data-throw-if-not-resolved=\"False\" />"
syntax: "public PathProperties readToFile(String filePath, boolean overwrite)"
desc: "Reads the entire file into a file specified by the path.\n\nIf overwrite is set to false, the file will be created and must not exist; if the file already exists, a <xref uid=\"\" data-throw-if-not-resolved=\"false\" data-raw-source=\"FileAlreadyExistsException\"></xref> will be thrown.\n\n**Code Samples**\n\n```java\nboolean overwrite = false; // Default value\n client.readToFile(file, overwrite);\n System.out.println(\"Completed download to file\");\n```\n\nFor more information, see the [Azure Docs][]\n\n\n[Azure Docs]: https://docs.microsoft.com/rest/api/storageservices/get-blob"
returns:
description: "The file properties and metadata."
type: "<xref href=\"com.azure.storage.file.datalake.models.PathProperties?alt=com.azure.storage.file.datalake.models.PathProperties&text=PathProperties\" data-throw-if-not-resolved=\"False\" />"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.readToFileWithResponse(com.azure.storage.file.datalake.options.ReadToFileOptions,java.time.Duration,com.azure.core.util.Context)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.readToFileWithResponse(ReadToFileOptions options, Duration timeout, Context context)"
name: "readToFileWithResponse(ReadToFileOptions options, Duration timeout, Context context)"
nameWithType: "DataLakeFileClient.readToFileWithResponse(ReadToFileOptions options, Duration timeout, Context context)"
summary: "Reads the entire file into a file specified by the path."
parameters:
- description: "<xref uid=\"com.azure.storage.file.datalake.options.ReadToFileOptions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"ReadToFileOptions\"></xref>"
name: "options"
type: "<xref href=\"com.azure.storage.file.datalake.options.ReadToFileOptions?alt=com.azure.storage.file.datalake.options.ReadToFileOptions&text=ReadToFileOptions\" data-throw-if-not-resolved=\"False\" />"
- description: "An optional timeout value beyond which a <xref uid=\"\" data-throw-if-not-resolved=\"false\" data-raw-source=\"RuntimeException\"></xref> will be raised."
name: "timeout"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/time/Duration.html\">Duration</a>"
- description: "Additional context that is passed through the Http pipeline during the service call."
name: "context"
type: "<xref href=\"com.azure.core.util.Context?alt=com.azure.core.util.Context&text=Context\" data-throw-if-not-resolved=\"False\" />"
syntax: "public Response<PathProperties> readToFileWithResponse(ReadToFileOptions options, Duration timeout, Context context)"
desc: "Reads the entire file into a file specified by the path.\n\nBy default, the file will be created and must not exist; if the file already exists, a <xref uid=\"\" data-throw-if-not-resolved=\"false\" data-raw-source=\"FileAlreadyExistsException\"></xref> will be thrown. To override this behavior, provide appropriate <xref uid=\"java.nio.file.OpenOption\" data-throw-if-not-resolved=\"false\" data-raw-source=\"OpenOptions\"></xref>.\n\n**Code Samples**\n\n```java\nReadToFileOptions options = new ReadToFileOptions(file);\n options.setRange(new FileRange(1024, 2048L));\n options.setDownloadRetryOptions(new DownloadRetryOptions().setMaxRetryRequests(5));\n options.setOpenOptions(new HashSet<>(Arrays.asList(StandardOpenOption.CREATE_NEW,\n StandardOpenOption.WRITE, StandardOpenOption.READ))); //Default options\n options.setParallelTransferOptions(new ParallelTransferOptions().setBlockSizeLong(4L * Constants.MB));\n options.setDataLakeRequestConditions(null);\n options.setRangeGetContentMd5(false);\n\n client.readToFileWithResponse(options, timeout, new Context(key2, value2));\n System.out.println(\"Completed download to file\");\n```"
returns:
description: "A response containing the file properties and metadata."
type: "<xref href=\"com.azure.core.http.rest.Response?alt=com.azure.core.http.rest.Response&text=Response\" data-throw-if-not-resolved=\"False\" />&lt;<xref href=\"com.azure.storage.file.datalake.models.PathProperties?alt=com.azure.storage.file.datalake.models.PathProperties&text=PathProperties\" data-throw-if-not-resolved=\"False\" />&gt;"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.readToFileWithResponse(java.lang.String,com.azure.storage.file.datalake.models.FileRange,com.azure.storage.common.ParallelTransferOptions,com.azure.storage.file.datalake.models.DownloadRetryOptions,com.azure.storage.file.datalake.models.DataLakeRequestConditions,boolean,java.util.Set<java.nio.file.OpenOption>,java.time.Duration,com.azure.core.util.Context)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.readToFileWithResponse(String filePath, FileRange range, ParallelTransferOptions parallelTransferOptions, DownloadRetryOptions downloadRetryOptions, DataLakeRequestConditions requestConditions, boolean rangeGetContentMd5, Set<OpenOption> openOptions, Duration timeout, Context context)"
name: "readToFileWithResponse(String filePath, FileRange range, ParallelTransferOptions parallelTransferOptions, DownloadRetryOptions downloadRetryOptions, DataLakeRequestConditions requestConditions, boolean rangeGetContentMd5, Set<OpenOption> openOptions, Duration timeout, Context context)"
nameWithType: "DataLakeFileClient.readToFileWithResponse(String filePath, FileRange range, ParallelTransferOptions parallelTransferOptions, DownloadRetryOptions downloadRetryOptions, DataLakeRequestConditions requestConditions, boolean rangeGetContentMd5, Set<OpenOption> openOptions, Duration timeout, Context context)"
summary: "Reads the entire file into a file specified by the path."
parameters:
- description: "A <xref uid=\"java.lang.String\" data-throw-if-not-resolved=\"false\" data-raw-source=\"String\"></xref> representing the filePath where the downloaded data will be written."
name: "filePath"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/String.html\">String</a>"
- description: "<xref uid=\"com.azure.storage.file.datalake.models.FileRange\" data-throw-if-not-resolved=\"false\" data-raw-source=\"FileRange\"></xref>"
name: "range"
type: "<xref href=\"com.azure.storage.file.datalake.models.FileRange?alt=com.azure.storage.file.datalake.models.FileRange&text=FileRange\" data-throw-if-not-resolved=\"False\" />"
- description: "<xref uid=\"com.azure.storage.common.ParallelTransferOptions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"ParallelTransferOptions\"></xref> to use to download to file. Number of parallel\n transfers parameter is ignored."
name: "parallelTransferOptions"
type: "<xref href=\"com.azure.storage.common.ParallelTransferOptions?alt=com.azure.storage.common.ParallelTransferOptions&text=ParallelTransferOptions\" data-throw-if-not-resolved=\"False\" />"
- description: "<xref uid=\"com.azure.storage.file.datalake.models.DownloadRetryOptions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DownloadRetryOptions\"></xref>"
name: "downloadRetryOptions"
type: "<xref href=\"com.azure.storage.file.datalake.models.DownloadRetryOptions?alt=com.azure.storage.file.datalake.models.DownloadRetryOptions&text=DownloadRetryOptions\" data-throw-if-not-resolved=\"False\" />"
- description: "<xref uid=\"com.azure.storage.file.datalake.models.DataLakeRequestConditions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakeRequestConditions\"></xref>"
name: "requestConditions"
type: "<xref href=\"com.azure.storage.file.datalake.models.DataLakeRequestConditions?alt=com.azure.storage.file.datalake.models.DataLakeRequestConditions&text=DataLakeRequestConditions\" data-throw-if-not-resolved=\"False\" />"
- description: "Whether the contentMD5 for the specified file range should be returned."
name: "rangeGetContentMd5"
type: "<xref href=\"boolean?alt=boolean&text=boolean\" data-throw-if-not-resolved=\"False\" />"
- description: "<xref uid=\"java.nio.file.OpenOption\" data-throw-if-not-resolved=\"false\" data-raw-source=\"OpenOptions\"></xref> to use to configure how to open or create the file."
name: "openOptions"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/util/Set.html\">Set</a>&lt;<a href=\"https://docs.oracle.com/javase/8/docs/api/java/nio/file/OpenOption.html\">OpenOption</a>&gt;"
- description: "An optional timeout value beyond which a <xref uid=\"\" data-throw-if-not-resolved=\"false\" data-raw-source=\"RuntimeException\"></xref> will be raised."
name: "timeout"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/time/Duration.html\">Duration</a>"
- description: "Additional context that is passed through the Http pipeline during the service call."
name: "context"
type: "<xref href=\"com.azure.core.util.Context?alt=com.azure.core.util.Context&text=Context\" data-throw-if-not-resolved=\"False\" />"
syntax: "public Response<PathProperties> readToFileWithResponse(String filePath, FileRange range, ParallelTransferOptions parallelTransferOptions, DownloadRetryOptions downloadRetryOptions, DataLakeRequestConditions requestConditions, boolean rangeGetContentMd5, Set<OpenOption> openOptions, Duration timeout, Context context)"
desc: "Reads the entire file into a file specified by the path.\n\nBy default, the file will be created and must not exist; if the file already exists, a <xref uid=\"\" data-throw-if-not-resolved=\"false\" data-raw-source=\"FileAlreadyExistsException\"></xref> will be thrown. To override this behavior, provide appropriate <xref uid=\"java.nio.file.OpenOption\" data-throw-if-not-resolved=\"false\" data-raw-source=\"OpenOptions\"></xref>.\n\n**Code Samples**\n\n```java\nFileRange fileRange = new FileRange(1024, 2048L);\n DownloadRetryOptions downloadRetryOptions = new DownloadRetryOptions().setMaxRetryRequests(5);\n Set<OpenOption> openOptions = new HashSet<>(Arrays.asList(StandardOpenOption.CREATE_NEW,\n StandardOpenOption.WRITE, StandardOpenOption.READ)); // Default options\n\n client.readToFileWithResponse(file, fileRange, new ParallelTransferOptions().setBlockSizeLong(4L * Constants.MB),\n downloadRetryOptions, null, false, openOptions, timeout, new Context(key2, value2));\n System.out.println(\"Completed download to file\");\n```\n\nFor more information, see the [Azure Docs][]\n\n\n[Azure Docs]: https://docs.microsoft.com/rest/api/storageservices/get-blob"
returns:
description: "A response containing the file properties and metadata."
type: "<xref href=\"com.azure.core.http.rest.Response?alt=com.azure.core.http.rest.Response&text=Response\" data-throw-if-not-resolved=\"False\" />&lt;<xref href=\"com.azure.storage.file.datalake.models.PathProperties?alt=com.azure.storage.file.datalake.models.PathProperties&text=PathProperties\" data-throw-if-not-resolved=\"False\" />&gt;"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.readWithResponse(java.io.OutputStream,com.azure.storage.file.datalake.models.FileRange,com.azure.storage.file.datalake.models.DownloadRetryOptions,com.azure.storage.file.datalake.models.DataLakeRequestConditions,boolean,java.time.Duration,com.azure.core.util.Context)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.readWithResponse(OutputStream stream, FileRange range, DownloadRetryOptions options, DataLakeRequestConditions requestConditions, boolean getRangeContentMd5, Duration timeout, Context context)"
name: "readWithResponse(OutputStream stream, FileRange range, DownloadRetryOptions options, DataLakeRequestConditions requestConditions, boolean getRangeContentMd5, Duration timeout, Context context)"
nameWithType: "DataLakeFileClient.readWithResponse(OutputStream stream, FileRange range, DownloadRetryOptions options, DataLakeRequestConditions requestConditions, boolean getRangeContentMd5, Duration timeout, Context context)"
summary: "Reads a range of bytes from a file into an output stream."
parameters:
- description: "A non-null <xref uid=\"java.io.OutputStream\" data-throw-if-not-resolved=\"false\" data-raw-source=\"OutputStream\"></xref> instance where the downloaded data will be written."
name: "stream"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/io/OutputStream.html\">OutputStream</a>"
- description: "<xref uid=\"com.azure.storage.file.datalake.models.FileRange\" data-throw-if-not-resolved=\"false\" data-raw-source=\"FileRange\"></xref>"
name: "range"
type: "<xref href=\"com.azure.storage.file.datalake.models.FileRange?alt=com.azure.storage.file.datalake.models.FileRange&text=FileRange\" data-throw-if-not-resolved=\"False\" />"
- description: "<xref uid=\"com.azure.storage.file.datalake.models.DownloadRetryOptions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DownloadRetryOptions\"></xref>"
name: "options"
type: "<xref href=\"com.azure.storage.file.datalake.models.DownloadRetryOptions?alt=com.azure.storage.file.datalake.models.DownloadRetryOptions&text=DownloadRetryOptions\" data-throw-if-not-resolved=\"False\" />"
- description: "<xref uid=\"com.azure.storage.file.datalake.models.DataLakeRequestConditions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakeRequestConditions\"></xref>"
name: "requestConditions"
type: "<xref href=\"com.azure.storage.file.datalake.models.DataLakeRequestConditions?alt=com.azure.storage.file.datalake.models.DataLakeRequestConditions&text=DataLakeRequestConditions\" data-throw-if-not-resolved=\"False\" />"
- description: "Whether the contentMD5 for the specified file range should be returned."
name: "getRangeContentMd5"
type: "<xref href=\"boolean?alt=boolean&text=boolean\" data-throw-if-not-resolved=\"False\" />"
- description: "An optional timeout value beyond which a <xref uid=\"\" data-throw-if-not-resolved=\"false\" data-raw-source=\"RuntimeException\"></xref> will be raised."
name: "timeout"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/time/Duration.html\">Duration</a>"
- description: "Additional context that is passed through the Http pipeline during the service call."
name: "context"
type: "<xref href=\"com.azure.core.util.Context?alt=com.azure.core.util.Context&text=Context\" data-throw-if-not-resolved=\"False\" />"
syntax: "public FileReadResponse readWithResponse(OutputStream stream, FileRange range, DownloadRetryOptions options, DataLakeRequestConditions requestConditions, boolean getRangeContentMd5, Duration timeout, Context context)"
desc: "Reads a range of bytes from a file into an output stream.\n\n**Code Samples**\n\n```java\nFileRange range = new FileRange(1024, 2048L);\n DownloadRetryOptions options = new DownloadRetryOptions().setMaxRetryRequests(5);\n\n System.out.printf(\"Download completed with status %d%n\",\n client.readWithResponse(new ByteArrayOutputStream(), range, options, null, false,\n timeout, new Context(key2, value2)).getStatusCode());\n```\n\nFor more information, see the [Azure Docs][]\n\n\n[Azure Docs]: https://docs.microsoft.com/rest/api/storageservices/get-blob"
returns:
description: "A response containing status code and HTTP headers."
type: "<xref href=\"com.azure.storage.file.datalake.models.FileReadResponse?alt=com.azure.storage.file.datalake.models.FileReadResponse&text=FileReadResponse\" data-throw-if-not-resolved=\"False\" />"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.rename(java.lang.String,java.lang.String)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.rename(String destinationFileSystem, String destinationPath)"
name: "rename(String destinationFileSystem, String destinationPath)"
nameWithType: "DataLakeFileClient.rename(String destinationFileSystem, String destinationPath)"
summary: "Moves the file to another location within the file system."
parameters:
- description: "The file system of the destination within the account.\n <code>null</code> for the current file system."
name: "destinationFileSystem"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/String.html\">String</a>"
- description: "Relative path from the file system to rename the file to, excludes the file system name.\n For example if you want to move a file with fileSystem = \"myfilesystem\", path = \"mydir/hello.txt\" to another path\n in myfilesystem (ex: newdir/hi.txt) then set the destinationPath = \"newdir/hi.txt\""
name: "destinationPath"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/String.html\">String</a>"
syntax: "public DataLakeFileClient rename(String destinationFileSystem, String destinationPath)"
desc: "Moves the file to another location within the file system. For more information, see the [Azure Docs][].\n\n**Code Samples**\n\n```java\nDataLakeFileClient renamedClient = client.rename(fileSystemName, destinationPath);\n System.out.println(\"File Client has been renamed\");\n```\n\n\n[Azure Docs]: https://docs.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/create"
returns:
description: "A <xref uid=\"com.azure.storage.file.datalake.DataLakeFileClient\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakeFileClient\"></xref> used to interact with the new file created."
type: "<xref href=\"com.azure.storage.file.datalake.DataLakeFileClient?alt=com.azure.storage.file.datalake.DataLakeFileClient&text=DataLakeFileClient\" data-throw-if-not-resolved=\"False\" />"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.renameWithResponse(java.lang.String,java.lang.String,com.azure.storage.file.datalake.models.DataLakeRequestConditions,com.azure.storage.file.datalake.models.DataLakeRequestConditions,java.time.Duration,com.azure.core.util.Context)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.renameWithResponse(String destinationFileSystem, String destinationPath, DataLakeRequestConditions sourceRequestConditions, DataLakeRequestConditions destinationRequestConditions, Duration timeout, Context context)"
name: "renameWithResponse(String destinationFileSystem, String destinationPath, DataLakeRequestConditions sourceRequestConditions, DataLakeRequestConditions destinationRequestConditions, Duration timeout, Context context)"
nameWithType: "DataLakeFileClient.renameWithResponse(String destinationFileSystem, String destinationPath, DataLakeRequestConditions sourceRequestConditions, DataLakeRequestConditions destinationRequestConditions, Duration timeout, Context context)"
summary: "Moves the file to another location within the file system."
parameters:
- description: "The file system of the destination within the account.\n <code>null</code> for the current file system."
name: "destinationFileSystem"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/String.html\">String</a>"
- description: "Relative path from the file system to rename the file to, excludes the file system name.\n For example if you want to move a file with fileSystem = \"myfilesystem\", path = \"mydir/hello.txt\" to another path\n in myfilesystem (ex: newdir/hi.txt) then set the destinationPath = \"newdir/hi.txt\""
name: "destinationPath"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/String.html\">String</a>"
- description: "<xref uid=\"com.azure.storage.file.datalake.models.DataLakeRequestConditions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakeRequestConditions\"></xref> against the source."
name: "sourceRequestConditions"
type: "<xref href=\"com.azure.storage.file.datalake.models.DataLakeRequestConditions?alt=com.azure.storage.file.datalake.models.DataLakeRequestConditions&text=DataLakeRequestConditions\" data-throw-if-not-resolved=\"False\" />"
- description: "<xref uid=\"com.azure.storage.file.datalake.models.DataLakeRequestConditions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakeRequestConditions\"></xref> against the destination."
name: "destinationRequestConditions"
type: "<xref href=\"com.azure.storage.file.datalake.models.DataLakeRequestConditions?alt=com.azure.storage.file.datalake.models.DataLakeRequestConditions&text=DataLakeRequestConditions\" data-throw-if-not-resolved=\"False\" />"
- description: "An optional timeout value beyond which a <xref uid=\"\" data-throw-if-not-resolved=\"false\" data-raw-source=\"RuntimeException\"></xref> will be raised."
name: "timeout"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/time/Duration.html\">Duration</a>"
- description: "Additional context that is passed through the Http pipeline during the service call."
name: "context"
type: "<xref href=\"com.azure.core.util.Context?alt=com.azure.core.util.Context&text=Context\" data-throw-if-not-resolved=\"False\" />"
syntax: "public Response<DataLakeFileClient> renameWithResponse(String destinationFileSystem, String destinationPath, DataLakeRequestConditions sourceRequestConditions, DataLakeRequestConditions destinationRequestConditions, Duration timeout, Context context)"
desc: "Moves the file to another location within the file system. For more information, see the [Azure Docs][].\n\n**Code Samples**\n\n```java\nDataLakeRequestConditions sourceRequestConditions = new DataLakeRequestConditions()\n .setLeaseId(leaseId);\n DataLakeRequestConditions destinationRequestConditions = new DataLakeRequestConditions();\n\n DataLakeFileClient newRenamedClient = client.renameWithResponse(fileSystemName, destinationPath,\n sourceRequestConditions, destinationRequestConditions, timeout, new Context(key1, value1)).getValue();\n System.out.println(\"File Client has been renamed\");\n```\n\n\n[Azure Docs]: https://docs.microsoft.com/rest/api/storageservices/datalakestoragegen2/path/create"
returns:
description: "A <xref uid=\"com.azure.core.http.rest.Response\" data-throw-if-not-resolved=\"false\" data-raw-source=\"Response\"></xref> whose <xref uid=\"com.azure.core.http.rest.Response.getValue*\" data-throw-if-not-resolved=\"false\" data-raw-source=\"value\"></xref> contains a <xref uid=\"com.azure.storage.file.datalake.DataLakeFileClient\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakeFileClient\"></xref>\n used to interact with the file created."
type: "<xref href=\"com.azure.core.http.rest.Response?alt=com.azure.core.http.rest.Response&text=Response\" data-throw-if-not-resolved=\"False\" />&lt;<xref href=\"com.azure.storage.file.datalake.DataLakeFileClient?alt=com.azure.storage.file.datalake.DataLakeFileClient&text=DataLakeFileClient\" data-throw-if-not-resolved=\"False\" />&gt;"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.scheduleDeletion(com.azure.storage.file.datalake.options.FileScheduleDeletionOptions)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.scheduleDeletion(FileScheduleDeletionOptions options)"
name: "scheduleDeletion(FileScheduleDeletionOptions options)"
nameWithType: "DataLakeFileClient.scheduleDeletion(FileScheduleDeletionOptions options)"
summary: "Schedules the file for deletion."
parameters:
- description: "Schedule deletion parameters."
name: "options"
type: "<xref href=\"com.azure.storage.file.datalake.options.FileScheduleDeletionOptions?alt=com.azure.storage.file.datalake.options.FileScheduleDeletionOptions&text=FileScheduleDeletionOptions\" data-throw-if-not-resolved=\"False\" />"
syntax: "public void scheduleDeletion(FileScheduleDeletionOptions options)"
desc: "Schedules the file for deletion.\n\n**Code Samples**\n\n```java\nFileScheduleDeletionOptions options = new FileScheduleDeletionOptions(OffsetDateTime.now().plusDays(1));\n client.scheduleDeletion(options);\n System.out.println(\"File deletion has been scheduled\");\n```"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.scheduleDeletionWithResponse(com.azure.storage.file.datalake.options.FileScheduleDeletionOptions,java.time.Duration,com.azure.core.util.Context)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.scheduleDeletionWithResponse(FileScheduleDeletionOptions options, Duration timeout, Context context)"
name: "scheduleDeletionWithResponse(FileScheduleDeletionOptions options, Duration timeout, Context context)"
nameWithType: "DataLakeFileClient.scheduleDeletionWithResponse(FileScheduleDeletionOptions options, Duration timeout, Context context)"
summary: "Schedules the file for deletion."
parameters:
- description: "Schedule deletion parameters."
name: "options"
type: "<xref href=\"com.azure.storage.file.datalake.options.FileScheduleDeletionOptions?alt=com.azure.storage.file.datalake.options.FileScheduleDeletionOptions&text=FileScheduleDeletionOptions\" data-throw-if-not-resolved=\"False\" />"
- description: "An optional timeout value beyond which a <xref uid=\"\" data-throw-if-not-resolved=\"false\" data-raw-source=\"RuntimeException\"></xref> will be raised."
name: "timeout"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/time/Duration.html\">Duration</a>"
- description: "Additional context that is passed through the Http pipeline during the service call."
name: "context"
type: "<xref href=\"com.azure.core.util.Context?alt=com.azure.core.util.Context&text=Context\" data-throw-if-not-resolved=\"False\" />"
syntax: "public Response<Void> scheduleDeletionWithResponse(FileScheduleDeletionOptions options, Duration timeout, Context context)"
desc: "Schedules the file for deletion.\n\n**Code Samples**\n\n```java\nFileScheduleDeletionOptions options = new FileScheduleDeletionOptions(OffsetDateTime.now().plusDays(1));\n Context context = new Context(\"key\", \"value\");\n\n client.scheduleDeletionWithResponse(options, timeout, context);\n System.out.println(\"File deletion has been scheduled\");\n```"
returns:
description: "A response containing status code and HTTP headers."
type: "<xref href=\"com.azure.core.http.rest.Response?alt=com.azure.core.http.rest.Response&text=Response\" data-throw-if-not-resolved=\"False\" />&lt;<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/Void.html\">Void</a>&gt;"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.upload(com.azure.core.util.BinaryData)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.upload(BinaryData data)"
name: "upload(BinaryData data)"
nameWithType: "DataLakeFileClient.upload(BinaryData data)"
summary: "Creates a new file."
parameters:
- description: "The data to write to the file. The data must be markable. This is in order to support retries. If\n the data is not markable, consider wrapping your data source in a <a href=\"https://docs.oracle.com/javase/8/docs/api/java/io/BufferedInputStream.html\">BufferedInputStream</a> to add mark\n support."
name: "data"
type: "<xref href=\"com.azure.core.util.BinaryData?alt=com.azure.core.util.BinaryData&text=BinaryData\" data-throw-if-not-resolved=\"False\" />"
syntax: "public PathInfo upload(BinaryData data)"
desc: "Creates a new file. By default, this method will not overwrite an existing file.\n\n**Code Samples**\n\n```java\ntry {\n client.upload(binaryData);\n System.out.println(\"Upload from file succeeded\");\n } catch (UncheckedIOException ex) {\n System.err.printf(\"Failed to upload from file %s%n\", ex.getMessage());\n }\n```"
returns:
description: "Information about the uploaded path."
type: "<xref href=\"com.azure.storage.file.datalake.models.PathInfo?alt=com.azure.storage.file.datalake.models.PathInfo&text=PathInfo\" data-throw-if-not-resolved=\"False\" />"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.upload(com.azure.core.util.BinaryData,boolean)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.upload(BinaryData data, boolean overwrite)"
name: "upload(BinaryData data, boolean overwrite)"
nameWithType: "DataLakeFileClient.upload(BinaryData data, boolean overwrite)"
summary: "Creates a new file, or updates the content of an existing file."
parameters:
- description: "The data to write to the file. The data must be markable. This is in order to support retries. If\n the data is not markable, consider wrapping your data source in a <a href=\"https://docs.oracle.com/javase/8/docs/api/java/io/BufferedInputStream.html\">BufferedInputStream</a> to add mark\n support."
name: "data"
type: "<xref href=\"com.azure.core.util.BinaryData?alt=com.azure.core.util.BinaryData&text=BinaryData\" data-throw-if-not-resolved=\"False\" />"
- description: "Whether to overwrite, should data exist on the file."
name: "overwrite"
type: "<xref href=\"boolean?alt=boolean&text=boolean\" data-throw-if-not-resolved=\"False\" />"
syntax: "public PathInfo upload(BinaryData data, boolean overwrite)"
desc: "Creates a new file, or updates the content of an existing file.\n\n**Code Samples**\n\n```java\ntry {\n boolean overwrite = false;\n client.upload(binaryData, overwrite);\n System.out.println(\"Upload from file succeeded\");\n } catch (UncheckedIOException ex) {\n System.err.printf(\"Failed to upload from file %s%n\", ex.getMessage());\n }\n```"
returns:
description: "Information about the uploaded path."
type: "<xref href=\"com.azure.storage.file.datalake.models.PathInfo?alt=com.azure.storage.file.datalake.models.PathInfo&text=PathInfo\" data-throw-if-not-resolved=\"False\" />"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.upload(java.io.InputStream,long)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.upload(InputStream data, long length)"
name: "upload(InputStream data, long length)"
nameWithType: "DataLakeFileClient.upload(InputStream data, long length)"
summary: "Creates a new file."
parameters:
- description: "The data to write to the file. The data must be markable. This is in order to support retries. If\n the data is not markable, consider wrapping your data source in a <a href=\"https://docs.oracle.com/javase/8/docs/api/java/io/BufferedInputStream.html\">BufferedInputStream</a> to add mark\n support."
name: "data"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/io/InputStream.html\">InputStream</a>"
- description: "The exact length of the data. It is important that this value match precisely the length of the\n data provided in the <xref uid=\"java.io.InputStream\" data-throw-if-not-resolved=\"false\" data-raw-source=\"InputStream\"></xref>."
name: "length"
type: "<xref href=\"long?alt=long&text=long\" data-throw-if-not-resolved=\"False\" />"
syntax: "public PathInfo upload(InputStream data, long length)"
desc: "Creates a new file. By default, this method will not overwrite an existing file.\n\n**Code Samples**\n\n```java\ntry {\n client.upload(data, length);\n System.out.println(\"Upload from file succeeded\");\n } catch (UncheckedIOException ex) {\n System.err.printf(\"Failed to upload from file %s%n\", ex.getMessage());\n }\n```"
returns:
description: "Information about the uploaded path."
type: "<xref href=\"com.azure.storage.file.datalake.models.PathInfo?alt=com.azure.storage.file.datalake.models.PathInfo&text=PathInfo\" data-throw-if-not-resolved=\"False\" />"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.upload(java.io.InputStream,long,boolean)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.upload(InputStream data, long length, boolean overwrite)"
name: "upload(InputStream data, long length, boolean overwrite)"
nameWithType: "DataLakeFileClient.upload(InputStream data, long length, boolean overwrite)"
summary: "Creates a new file, or updates the content of an existing file."
parameters:
- description: "The data to write to the file. The data must be markable. This is in order to support retries. If\n the data is not markable, consider wrapping your data source in a <a href=\"https://docs.oracle.com/javase/8/docs/api/java/io/BufferedInputStream.html\">BufferedInputStream</a> to add mark\n support."
name: "data"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/io/InputStream.html\">InputStream</a>"
- description: "The exact length of the data. It is important that this value match precisely the length of the\n data provided in the <xref uid=\"java.io.InputStream\" data-throw-if-not-resolved=\"false\" data-raw-source=\"InputStream\"></xref>."
name: "length"
type: "<xref href=\"long?alt=long&text=long\" data-throw-if-not-resolved=\"False\" />"
- description: "Whether to overwrite, should data exist on the file."
name: "overwrite"
type: "<xref href=\"boolean?alt=boolean&text=boolean\" data-throw-if-not-resolved=\"False\" />"
syntax: "public PathInfo upload(InputStream data, long length, boolean overwrite)"
desc: "Creates a new file, or updates the content of an existing file.\n\n**Code Samples**\n\n```java\ntry {\n boolean overwrite = false;\n client.upload(data, length, overwrite);\n System.out.println(\"Upload from file succeeded\");\n } catch (UncheckedIOException ex) {\n System.err.printf(\"Failed to upload from file %s%n\", ex.getMessage());\n }\n```"
returns:
description: "Information about the uploaded path."
type: "<xref href=\"com.azure.storage.file.datalake.models.PathInfo?alt=com.azure.storage.file.datalake.models.PathInfo&text=PathInfo\" data-throw-if-not-resolved=\"False\" />"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.uploadFromFile(java.lang.String)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.uploadFromFile(String filePath)"
name: "uploadFromFile(String filePath)"
nameWithType: "DataLakeFileClient.uploadFromFile(String filePath)"
summary: "Creates a file, with the content of the specified file."
parameters:
- description: "Path of the file to upload"
name: "filePath"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/String.html\">String</a>"
syntax: "public void uploadFromFile(String filePath)"
desc: "Creates a file, with the content of the specified file. By default, this method will not overwrite an existing file.\n\n**Code Samples**\n\n```java\ntry {\n client.uploadFromFile(filePath);\n System.out.println(\"Upload from file succeeded\");\n } catch (UncheckedIOException ex) {\n System.err.printf(\"Failed to upload from file %s%n\", ex.getMessage());\n }\n```"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.uploadFromFile(java.lang.String,boolean)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.uploadFromFile(String filePath, boolean overwrite)"
name: "uploadFromFile(String filePath, boolean overwrite)"
nameWithType: "DataLakeFileClient.uploadFromFile(String filePath, boolean overwrite)"
summary: "Creates a file, with the content of the specified file."
parameters:
- description: "Path of the file to upload"
name: "filePath"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/String.html\">String</a>"
- description: "Whether to overwrite, should the file already exist"
name: "overwrite"
type: "<xref href=\"boolean?alt=boolean&text=boolean\" data-throw-if-not-resolved=\"False\" />"
syntax: "public void uploadFromFile(String filePath, boolean overwrite)"
desc: "Creates a file, with the content of the specified file.\n\n**Code Samples**\n\n```java\ntry {\n boolean overwrite = false;\n client.uploadFromFile(filePath, overwrite);\n System.out.println(\"Upload from file succeeded\");\n } catch (UncheckedIOException ex) {\n System.err.printf(\"Failed to upload from file %s%n\", ex.getMessage());\n }\n```"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.uploadFromFile(java.lang.String,com.azure.storage.common.ParallelTransferOptions,com.azure.storage.file.datalake.models.PathHttpHeaders,java.util.Map<java.lang.String,java.lang.String>,com.azure.storage.file.datalake.models.DataLakeRequestConditions,java.time.Duration)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.uploadFromFile(String filePath, ParallelTransferOptions parallelTransferOptions, PathHttpHeaders headers, Map<String,String> metadata, DataLakeRequestConditions requestConditions, Duration timeout)"
name: "uploadFromFile(String filePath, ParallelTransferOptions parallelTransferOptions, PathHttpHeaders headers, Map<String,String> metadata, DataLakeRequestConditions requestConditions, Duration timeout)"
nameWithType: "DataLakeFileClient.uploadFromFile(String filePath, ParallelTransferOptions parallelTransferOptions, PathHttpHeaders headers, Map<String,String> metadata, DataLakeRequestConditions requestConditions, Duration timeout)"
summary: "Creates a file, with the content of the specified file."
parameters:
- description: "Path of the file to upload"
name: "filePath"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/String.html\">String</a>"
- description: "<xref uid=\"com.azure.storage.common.ParallelTransferOptions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"ParallelTransferOptions\"></xref> used to configure buffered uploading."
name: "parallelTransferOptions"
type: "<xref href=\"com.azure.storage.common.ParallelTransferOptions?alt=com.azure.storage.common.ParallelTransferOptions&text=ParallelTransferOptions\" data-throw-if-not-resolved=\"False\" />"
- description: "<xref uid=\"com.azure.storage.file.datalake.models.PathHttpHeaders\" data-throw-if-not-resolved=\"false\" data-raw-source=\"PathHttpHeaders\"></xref>"
name: "headers"
type: "<xref href=\"com.azure.storage.file.datalake.models.PathHttpHeaders?alt=com.azure.storage.file.datalake.models.PathHttpHeaders&text=PathHttpHeaders\" data-throw-if-not-resolved=\"False\" />"
- description: "Metadata to associate with the resource. If there is leading or trailing whitespace in any\n metadata key or value, it must be removed or encoded."
name: "metadata"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/util/Map.html\">Map</a>&lt;<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/String.html\">String</a>,<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/String.html\">String</a>&gt;"
- description: "<xref uid=\"com.azure.storage.file.datalake.models.DataLakeRequestConditions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakeRequestConditions\"></xref>"
name: "requestConditions"
type: "<xref href=\"com.azure.storage.file.datalake.models.DataLakeRequestConditions?alt=com.azure.storage.file.datalake.models.DataLakeRequestConditions&text=DataLakeRequestConditions\" data-throw-if-not-resolved=\"False\" />"
- description: "An optional timeout value beyond which a <xref uid=\"\" data-throw-if-not-resolved=\"false\" data-raw-source=\"RuntimeException\"></xref> will be raised."
name: "timeout"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/time/Duration.html\">Duration</a>"
syntax: "public void uploadFromFile(String filePath, ParallelTransferOptions parallelTransferOptions, PathHttpHeaders headers, Map<String,String> metadata, DataLakeRequestConditions requestConditions, Duration timeout)"
desc: "Creates a file, with the content of the specified file.\n\nTo avoid overwriting, pass \"\\*\" to <xref uid=\"com.azure.storage.file.datalake.models.DataLakeRequestConditions.setIfNoneMatch(java.lang.String)\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakeRequestConditions#setIfNoneMatch(String)\"></xref>.\n\n**Code Samples**\n\n```java\nPathHttpHeaders headers = new PathHttpHeaders()\n .setContentMd5(\"data\".getBytes(StandardCharsets.UTF_8))\n .setContentLanguage(\"en-US\")\n .setContentType(\"binary\");\n\n Map<String, String> metadata = Collections.singletonMap(\"metadata\", \"value\");\n DataLakeRequestConditions requestConditions = new DataLakeRequestConditions()\n .setLeaseId(leaseId)\n .setIfUnmodifiedSince(OffsetDateTime.now().minusDays(3));\n Long blockSize = 100L * 1024L * 1024L; // 100 MB;\n ParallelTransferOptions parallelTransferOptions = new ParallelTransferOptions().setBlockSizeLong(blockSize);\n\n try {\n client.uploadFromFile(filePath, parallelTransferOptions, headers, metadata, requestConditions, timeout);\n System.out.println(\"Upload from file succeeded\");\n } catch (UncheckedIOException ex) {\n System.err.printf(\"Failed to upload from file %s%n\", ex.getMessage());\n }\n```"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.uploadFromFileWithResponse(java.lang.String,com.azure.storage.common.ParallelTransferOptions,com.azure.storage.file.datalake.models.PathHttpHeaders,java.util.Map<java.lang.String,java.lang.String>,com.azure.storage.file.datalake.models.DataLakeRequestConditions,java.time.Duration,com.azure.core.util.Context)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.uploadFromFileWithResponse(String filePath, ParallelTransferOptions parallelTransferOptions, PathHttpHeaders headers, Map<String,String> metadata, DataLakeRequestConditions requestConditions, Duration timeout, Context context)"
name: "uploadFromFileWithResponse(String filePath, ParallelTransferOptions parallelTransferOptions, PathHttpHeaders headers, Map<String,String> metadata, DataLakeRequestConditions requestConditions, Duration timeout, Context context)"
nameWithType: "DataLakeFileClient.uploadFromFileWithResponse(String filePath, ParallelTransferOptions parallelTransferOptions, PathHttpHeaders headers, Map<String,String> metadata, DataLakeRequestConditions requestConditions, Duration timeout, Context context)"
summary: "Creates a file, with the content of the specified file."
parameters:
- description: "Path of the file to upload"
name: "filePath"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/String.html\">String</a>"
- description: "<xref uid=\"com.azure.storage.common.ParallelTransferOptions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"ParallelTransferOptions\"></xref> used to configure buffered uploading."
name: "parallelTransferOptions"
type: "<xref href=\"com.azure.storage.common.ParallelTransferOptions?alt=com.azure.storage.common.ParallelTransferOptions&text=ParallelTransferOptions\" data-throw-if-not-resolved=\"False\" />"
- description: "<xref uid=\"com.azure.storage.file.datalake.models.PathHttpHeaders\" data-throw-if-not-resolved=\"false\" data-raw-source=\"PathHttpHeaders\"></xref>"
name: "headers"
type: "<xref href=\"com.azure.storage.file.datalake.models.PathHttpHeaders?alt=com.azure.storage.file.datalake.models.PathHttpHeaders&text=PathHttpHeaders\" data-throw-if-not-resolved=\"False\" />"
- description: "Metadata to associate with the resource. If there is leading or trailing whitespace in any\n metadata key or value, it must be removed or encoded."
name: "metadata"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/util/Map.html\">Map</a>&lt;<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/String.html\">String</a>,<a href=\"https://docs.oracle.com/javase/8/docs/api/java/lang/String.html\">String</a>&gt;"
- description: "<xref uid=\"com.azure.storage.file.datalake.models.DataLakeRequestConditions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakeRequestConditions\"></xref>"
name: "requestConditions"
type: "<xref href=\"com.azure.storage.file.datalake.models.DataLakeRequestConditions?alt=com.azure.storage.file.datalake.models.DataLakeRequestConditions&text=DataLakeRequestConditions\" data-throw-if-not-resolved=\"False\" />"
- description: "An optional timeout value beyond which a <xref uid=\"\" data-throw-if-not-resolved=\"false\" data-raw-source=\"RuntimeException\"></xref> will be raised."
name: "timeout"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/time/Duration.html\">Duration</a>"
- description: "Additional context that is passed through the Http pipeline during the service call."
name: "context"
type: "<xref href=\"com.azure.core.util.Context?alt=com.azure.core.util.Context&text=Context\" data-throw-if-not-resolved=\"False\" />"
syntax: "public Response<PathInfo> uploadFromFileWithResponse(String filePath, ParallelTransferOptions parallelTransferOptions, PathHttpHeaders headers, Map<String,String> metadata, DataLakeRequestConditions requestConditions, Duration timeout, Context context)"
desc: "Creates a file, with the content of the specified file.\n\nTo avoid overwriting, pass \"\\*\" to <xref uid=\"com.azure.storage.file.datalake.models.DataLakeRequestConditions.setIfNoneMatch(java.lang.String)\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakeRequestConditions#setIfNoneMatch(String)\"></xref>.\n\n**Code Samples**\n\n```java\nPathHttpHeaders headers = new PathHttpHeaders()\n .setContentMd5(\"data\".getBytes(StandardCharsets.UTF_8))\n .setContentLanguage(\"en-US\")\n .setContentType(\"binary\");\n\n Map<String, String> metadata = Collections.singletonMap(\"metadata\", \"value\");\n DataLakeRequestConditions requestConditions = new DataLakeRequestConditions()\n .setLeaseId(leaseId)\n .setIfUnmodifiedSince(OffsetDateTime.now().minusDays(3));\n Long blockSize = 100L * 1024L * 1024L; // 100 MB;\n ParallelTransferOptions parallelTransferOptions = new ParallelTransferOptions().setBlockSizeLong(blockSize);\n\n try {\n Response<PathInfo> response = client.uploadFromFileWithResponse(filePath, parallelTransferOptions, headers,\n metadata, requestConditions, timeout, new Context(\"key\", \"value\"));\n System.out.printf(\"Upload from file succeeded with status %d%n\", response.getStatusCode());\n } catch (UncheckedIOException ex) {\n System.err.printf(\"Failed to upload from file %s%n\", ex.getMessage());\n }\n```"
returns:
description: "Response containing information about the uploaded path."
type: "<xref href=\"com.azure.core.http.rest.Response?alt=com.azure.core.http.rest.Response&text=Response\" data-throw-if-not-resolved=\"False\" />&lt;<xref href=\"com.azure.storage.file.datalake.models.PathInfo?alt=com.azure.storage.file.datalake.models.PathInfo&text=PathInfo\" data-throw-if-not-resolved=\"False\" />&gt;"
- uid: "com.azure.storage.file.datalake.DataLakeFileClient.uploadWithResponse(com.azure.storage.file.datalake.options.FileParallelUploadOptions,java.time.Duration,com.azure.core.util.Context)"
fullName: "com.azure.storage.file.datalake.DataLakeFileClient.uploadWithResponse(FileParallelUploadOptions options, Duration timeout, Context context)"
name: "uploadWithResponse(FileParallelUploadOptions options, Duration timeout, Context context)"
nameWithType: "DataLakeFileClient.uploadWithResponse(FileParallelUploadOptions options, Duration timeout, Context context)"
summary: "Creates a new file."
parameters:
- description: "<xref uid=\"com.azure.storage.file.datalake.options.FileParallelUploadOptions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"FileParallelUploadOptions\"></xref>"
name: "options"
type: "<xref href=\"com.azure.storage.file.datalake.options.FileParallelUploadOptions?alt=com.azure.storage.file.datalake.options.FileParallelUploadOptions&text=FileParallelUploadOptions\" data-throw-if-not-resolved=\"False\" />"
- description: "An optional timeout value beyond which a <xref uid=\"\" data-throw-if-not-resolved=\"false\" data-raw-source=\"RuntimeException\"></xref> will be raised."
name: "timeout"
type: "<a href=\"https://docs.oracle.com/javase/8/docs/api/java/time/Duration.html\">Duration</a>"
- description: "Additional context that is passed through the Http pipeline during the service call."
name: "context"
type: "<xref href=\"com.azure.core.util.Context?alt=com.azure.core.util.Context&text=Context\" data-throw-if-not-resolved=\"False\" />"
syntax: "public Response<PathInfo> uploadWithResponse(FileParallelUploadOptions options, Duration timeout, Context context)"
desc: "Creates a new file. To avoid overwriting, pass \"\\*\" to <xref uid=\"com.azure.storage.file.datalake.models.DataLakeRequestConditions.setIfNoneMatch(java.lang.String)\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakeRequestConditions#setIfNoneMatch(String)\"></xref>.\n\n**Code Samples**\n\n```java\nPathHttpHeaders headers = new PathHttpHeaders()\n .setContentMd5(\"data\".getBytes(StandardCharsets.UTF_8))\n .setContentLanguage(\"en-US\")\n .setContentType(\"binary\");\n\n Map<String, String> metadata = Collections.singletonMap(\"metadata\", \"value\");\n DataLakeRequestConditions requestConditions = new DataLakeRequestConditions()\n .setLeaseId(leaseId)\n .setIfUnmodifiedSince(OffsetDateTime.now().minusDays(3));\n Long blockSize = 100L * 1024L * 1024L; // 100 MB;\n ParallelTransferOptions parallelTransferOptions = new ParallelTransferOptions().setBlockSizeLong(blockSize);\n\n try {\n client.uploadWithResponse(new FileParallelUploadOptions(data, length)\n .setParallelTransferOptions(parallelTransferOptions).setHeaders(headers)\n .setMetadata(metadata).setRequestConditions(requestConditions)\n .setPermissions(\"permissions\").setUmask(\"umask\"), timeout, new Context(\"key\", \"value\"));\n System.out.println(\"Upload from file succeeded\");\n } catch (UncheckedIOException ex) {\n System.err.printf(\"Failed to upload from file %s%n\", ex.getMessage());\n }\n```"
returns:
description: "Information about the uploaded path."
type: "<xref href=\"com.azure.core.http.rest.Response?alt=com.azure.core.http.rest.Response&text=Response\" data-throw-if-not-resolved=\"False\" />&lt;<xref href=\"com.azure.storage.file.datalake.models.PathInfo?alt=com.azure.storage.file.datalake.models.PathInfo&text=PathInfo\" data-throw-if-not-resolved=\"False\" />&gt;"
type: "class"
desc: "This class provides a client that contains file operations for Azure Storage Data Lake. Operations provided by this client include creating a file, deleting a file, renaming a file, setting metadata and http headers, setting and retrieving access control, getting properties, reading a file, and appending and flushing data to write to a file.\n\nThis client is instantiated through <xref uid=\"com.azure.storage.file.datalake.DataLakePathClientBuilder\" data-throw-if-not-resolved=\"false\" data-raw-source=\"DataLakePathClientBuilder\"></xref> or retrieved via <xref uid=\"com.azure.storage.file.datalake.DataLakeFileSystemClient.getFileClient(java.lang.String)\" data-throw-if-not-resolved=\"false\" data-raw-source=\"getFileClient\"></xref>.\n\nPlease refer to the [Azure Docs][] for more information.\n\n\n[Azure Docs]: https://docs.microsoft.com/azure/storage/blobs/data-lake-storage-introduction"
metadata: {}
package: "com.azure.storage.file.datalake"
artifact: com.azure:azure-storage-file-datalake:12.20.1