[OpenAI] Add a sample for global batch (#31278)

Add batch sample
Minh-Anh Phan 2024-10-16 11:35:07 -07:00 committed by GitHub
Parent e008167f7a
Commit 327e08506d
5 changed files with 193 additions and 0 deletions

View file

@@ -0,0 +1,63 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
/**
* Demonstrates how to use Azure OpenAI global batch deployment.
*
* @summary create and retrieve batch content.
* @azsdk-weight 100
*/
import { AzureOpenAI, toFile } from "openai";
import { DefaultAzureCredential, getBearerTokenProvider } from "@azure/identity";
// Set AZURE_OPENAI_ENDPOINT to the endpoint of your
// OpenAI resource. You can find this in the Azure portal.
// Load the .env file if it exists
import "dotenv/config";
export async function main() {
console.log("== Batch Chat Completions Sample ==");
const scope = "https://cognitiveservices.azure.com/.default";
const azureADTokenProvider = getBearerTokenProvider(new DefaultAzureCredential(), scope);
const deployment = "gpt-4-turbo";
const apiVersion = "2024-08-01-preview";
const client = new AzureOpenAI({ azureADTokenProvider, deployment, apiVersion });
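  // Each line of the batch input file is one JSONL request: "custom_id" identifies the
  // request in the output, "method" and "url" select the operation to run, and "body"
  // is a regular chat completions payload for the chosen deployment.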
  const batchContent = `{ "custom_id": "request-1", "method": "POST", "url": "/v1/chat/completions", "body": { "model": "${deployment}", "messages": [{ "role": "system", "content": "You are a helpful assistant." }, { "role": "user", "content": "What is 2+2?" }] } }`;
  // Upload a file with "batch" purpose
  const file = await client.files.create({
    file: await toFile(Buffer.from(batchContent), "batch.jsonl"),
    purpose: "batch",
  });
  // Create the batch
  const batch = await client.batches.create({
    endpoint: "/v1/chat/completions",
    input_file_id: file.id,
    completion_window: "24h",
  });
  console.log(batch);
  // Checking batch status
  const retrievedBatch = await client.batches.retrieve(batch.id);
  console.log(retrievedBatch);
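  // Note: batches complete asynchronously, so right after creation the status is usually
  // still "validating" or "in_progress". A real application would typically poll until a
  // terminal status is reached before reading the output, for example (sketch):
  //   let current = retrievedBatch;
  //   while (!["completed", "failed", "expired", "cancelled"].includes(current.status)) {
  //     await new Promise((resolve) => setTimeout(resolve, 60_000));
  //     current = await client.batches.retrieve(batch.id);
  //   }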
  // Retrieve the batch output
  const outputFileId = retrievedBatch.output_file_id ?? retrievedBatch.error_file_id;
  if (outputFileId) {
    const fileResponse = await client.files.content(outputFileId);
    const fileContent = await fileResponse.text();
    console.log(fileContent);
  }
  // Clean up file
  await client.files.del(file.id);
}
main().catch((err) => {
console.error("The sample encountered an error:", err);
});

View file

@@ -17,6 +17,7 @@ These sample programs show how to use the JavaScript client libraries for Azure
| ----------------------------------------------------------------------------------- | ---------------------------------------------------------------------------- |
| [audioTranscription.js][audiotranscription] | audio transcription. |
| [audioTranslation.js][audiotranslation] | audio translation. |
| [batch.js][batch] | create and retrieve batch content. |
| [chatCompletions.js][chatcompletions] | get chat completions. |
| [codeInterpreter.js][codeinterpreter] | interpreting code. |
| [completions.js][completions] | get completions. |
@@ -71,6 +72,7 @@ Take a look at our [API Documentation][apiref] for more information about the AP
[audiotranscription]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/openai/openai/samples/v2-beta/javascript/audioTranscription.js
[audiotranslation]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/openai/openai/samples/v2-beta/javascript/audioTranslation.js
[batch]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/openai/openai/samples/v2-beta/javascript/batch.js
[chatcompletions]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/openai/openai/samples/v2-beta/javascript/chatCompletions.js
[codeinterpreter]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/openai/openai/samples/v2-beta/javascript/codeInterpreter.js
[completions]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/openai/openai/samples/v2-beta/javascript/completions.js

View file

@@ -0,0 +1,64 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
/**
* Demonstrates how to use Azure OpenAI global batch deployment.
*
* @summary create and retrieve batch content.
*/
const { AzureOpenAI, toFile } = require("openai");
const { DefaultAzureCredential, getBearerTokenProvider } = require("@azure/identity");
// Set AZURE_OPENAI_ENDPOINT to the endpoint of your
// OpenAI resource. You can find this in the Azure portal.
// Load the .env file if it exists
require("dotenv/config");
async function main() {
console.log("== Batch Chat Completions Sample ==");
const scope = "https://cognitiveservices.azure.com/.default";
const azureADTokenProvider = getBearerTokenProvider(new DefaultAzureCredential(), scope);
const deployment = "gpt-4-turbo";
const apiVersion = "2024-08-01-preview";
const client = new AzureOpenAI({ azureADTokenProvider, deployment, apiVersion });
const batchContent = `{ "custom_id": "request-1", "method": "POST", "url": "/v1/chat/completions", "body": { "model": "${deployment}", "messages": [{ "role": "system", "content": "You are a helpful assistant." }, { "role": "user", "content": "What is 2+2?" }] } }`;
// Upload a file with "batch" purpose
const file = await client.files.create({
file: await toFile(Buffer.from(batchContent), "batch.jsonl"),
purpose: "batch",
});
// Create the batch
const batch = await client.batches.create({
endpoint: "/v1/chat/completions",
input_file_id: file.id,
completion_window: "24h",
});
console.log(batch);
// Checking batch status
const retrievedBatch = await client.batches.retrieve(batch.id);
console.log(retrievedBatch);
// Retrieve the batch output
const outputFileId = retrievedBatch.output_file_id ?? retrievedBatch.error_file_id;
if (outputFileId) {
const fileResponse = await client.files.content(outputFileId);
const fileContent = await fileResponse.text();
console.log(fileContent);
}
// Clean up file
await client.files.del(file.id);
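  // The output (or error) file could be cleaned up the same way once its contents have
  // been saved, e.g. `await client.files.del(outputFileId)`.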
}
main().catch((err) => {
console.error("The sample encountered an error:", err);
});
module.exports = { main };

View file

@@ -17,6 +17,7 @@ These sample programs show how to use the TypeScript client libraries for Azure
| ----------------------------------------------------------------------------------- | ---------------------------------------------------------------------------- |
| [audioTranscription.ts][audiotranscription] | audio transcription. |
| [audioTranslation.ts][audiotranslation] | audio translation. |
| [batch.ts][batch] | create and retrieve batch content. |
| [chatCompletions.ts][chatcompletions] | get chat completions. |
| [codeInterpreter.ts][codeinterpreter] | interpreting code. |
| [completions.ts][completions] | get completions. |
@@ -83,6 +84,7 @@ Take a look at our [API Documentation][apiref] for more information about the AP
[audiotranscription]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/openai/openai/samples/v2-beta/typescript/src/audioTranscription.ts
[audiotranslation]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/openai/openai/samples/v2-beta/typescript/src/audioTranslation.ts
[batch]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/openai/openai/samples/v2-beta/typescript/src/batch.ts
[chatcompletions]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/openai/openai/samples/v2-beta/typescript/src/chatCompletions.ts
[codeinterpreter]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/openai/openai/samples/v2-beta/typescript/src/codeInterpreter.ts
[completions]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/openai/openai/samples/v2-beta/typescript/src/completions.ts

View file

@@ -0,0 +1,62 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
/**
* Demonstrates how to use Azure OpenAI global batch deployment.
*
* @summary create and retrieve batch content.
*/
import { AzureOpenAI, toFile } from "openai";
import { DefaultAzureCredential, getBearerTokenProvider } from "@azure/identity";
// Set AZURE_OPENAI_ENDPOINT to the endpoint of your
// OpenAI resource. You can find this in the Azure portal.
// Load the .env file if it exists
import "dotenv/config";
export async function main() {
console.log("== Batch Chat Completions Sample ==");
const scope = "https://cognitiveservices.azure.com/.default";
const azureADTokenProvider = getBearerTokenProvider(new DefaultAzureCredential(), scope);
const deployment = "gpt-4-turbo";
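  // Note: "gpt-4-turbo" is assumed to be the name of a global batch deployment in the
  // targeted Azure OpenAI resource; substitute your own deployment name as needed.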
  const apiVersion = "2024-08-01-preview";
  const client = new AzureOpenAI({ azureADTokenProvider, deployment, apiVersion });
  const batchContent = `{ "custom_id": "request-1", "method": "POST", "url": "/v1/chat/completions", "body": { "model": "${deployment}", "messages": [{ "role": "system", "content": "You are a helpful assistant." }, { "role": "user", "content": "What is 2+2?" }] } }`;
  // Upload a file with "batch" purpose
  const file = await client.files.create({
    file: await toFile(Buffer.from(batchContent), "batch.jsonl"),
    purpose: "batch",
  });
  // Create the batch
  const batch = await client.batches.create({
    endpoint: "/v1/chat/completions",
    input_file_id: file.id,
    completion_window: "24h",
  });
  console.log(batch);
  // Checking batch status
  const retrievedBatch = await client.batches.retrieve(batch.id);
  console.log(retrievedBatch);
  // Retrieve the batch output
  const outputFileId = retrievedBatch.output_file_id ?? retrievedBatch.error_file_id;
  if (outputFileId) {
    const fileResponse = await client.files.content(outputFileId);
    const fileContent = await fileResponse.text();
    console.log(fileContent);
  }
  // Clean up file
  await client.files.del(file.id);
}
main().catch((err) => {
console.error("The sample encountered an error:", err);
});