### Packages impacted by this PR

@azure-rest/ai-inference

### Issues associated with this PR

#30554

### Describe the problem that is addressed by this PR

Fixes to the code samples in the README and the samples folder.

### What are the possible designs available to address the problem? If there are more than one possible design, why was the one in this PR chosen?

### Are there test cases added in this PR? _(If not, why?)_

### Provide a list of related PRs _(if any)_

### Command used to generate this PR _(Applicable only to SDK release request PRs)_

### Checklists

- [ ] Added impacted package name to the issue description
- [ ] Does this PR need any fixes in the SDK Generator? _(If so, create an issue in the [Autorest/typescript](https://github.com/Azure/autorest.typescript) repository and link it here)_
- [ ] Added a changelog (if necessary)
Parent: c588b57d86
Commit: eef7afb8d6
```diff
@@ -223,7 +223,7 @@ async function main(){
   }
   for (const choice of response.body.choices) {
     const completion = choice.message.content;
-    console.log(`Input: ${examplePrompts[promptIndex++]}`);
+    console.log(`Input: ${messages[promptIndex++].content}`);
     console.log(`Chatbot: ${completion}`);
   }
 }
```
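For context, a minimal sketch of how the corrected loop reads when run end to end. The endpoint handling, the example `messages` array, and the `promptIndex` counter are placeholders following the README's general pattern rather than lines taken from this diff:

```ts
import ModelClient, { isUnexpected } from "@azure-rest/ai-inference";
import { DefaultAzureCredential } from "@azure/identity";

async function main() {
  const endpoint = process.env["ENDPOINT"] ?? "<endpoint>"; // placeholder
  const client = ModelClient(endpoint, new DefaultAzureCredential());

  // The prompts now live only in `messages`; the old sample logged a
  // non-existent `examplePrompts` array.
  const messages = [{ role: "user" as const, content: "How many feet are in a mile?" }];

  const response = await client.path("/chat/completions").post({ body: { messages } });
  if (isUnexpected(response)) {
    throw response.body.error;
  }

  let promptIndex = 0;
  for (const choice of response.body.choices) {
    const completion = choice.message.content;
    console.log(`Input: ${messages[promptIndex++].content}`);
    console.log(`Chatbot: ${completion}`);
  }
}

main().catch(console.error);
```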
```diff
@@ -251,7 +251,7 @@ async function main(){
   ""As a layman I would say: 'I think we have it'. Would you agree?"" Rolf-Dieter Heuer, CERN's director-general, asked the packed auditorium. The physicists assembled there burst into applause.
   :`;

-  const summarizationPrompt = [`
+  const summarizationPrompt = `
   Summarize the following text.

   Text:
@@ -260,7 +260,7 @@ async function main(){
   """"""

   Summary:
-  `];
+  `;

   console.log(`Input: ${summarizationPrompt}`);

```
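The two hunks above belong to one fix: the chat message `content` must be a plain string, so wrapping the template literal in an array (a leftover from the older completions-style samples) breaks the request. A hedged sketch of the corrected usage, written as a fragment that sits inside the README's async `main()` and reuses the `client` created there; `textToSummarize` is abbreviated here:

```ts
const textToSummarize = "Two independent experiments at CERN ..."; // abbreviated stand-in for the CERN text above

// A plain template literal, not an array-wrapped one, so it can be passed
// directly as the string content of a user message.
const summarizationPrompt = `
  Summarize the following text.

  Text:
  """"""
  ${textToSummarize}
  """"""

  Summary:
`;

console.log(`Input: ${summarizationPrompt}`);

// `client` is the ModelClient from the earlier sketch.
const response = await client.path("/chat/completions").post({
  body: { messages: [{ role: "user", content: summarizationPrompt }] },
});
```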
````diff
@@ -354,7 +354,7 @@ context -- including the original system and user messages, the response from th
 calls, and the tool messages that resolved each of those tools -- when making a subsequent request.

 ```js
-const choice = result.choices[0];
+const choice = result.body.choices[0];
 const responseMessage = choice.message;
 if (responseMessage?.role === "assistant") {
   const requestedToolCalls = responseMessage?.toolCalls;
````
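A short, hedged sketch of what the corrected snippet does with the tool calls it finds. `result` and `messages` stand for the response and conversation from the README's tool-call sample, the `function.name`/`function.arguments` fields follow the chat tool-call shape that sample uses, and the loose `any` typing is only to keep the fragment compact:

```ts
function collectToolCalls(result: any, messages: any[]) {
  // The completions payload sits under `result.body` on the REST response,
  // which is what this change fixes (`result.choices` does not exist).
  const choice = result.body.choices[0];
  const responseMessage = choice.message;

  if (responseMessage?.role === "assistant" && responseMessage?.toolCalls?.length) {
    // Keep the assistant turn (with its tool calls) in the running conversation;
    // each call should then be resolved and answered with a "tool" message
    // before the follow-up request, as the README text above describes.
    messages.push(responseMessage);
    for (const toolCall of responseMessage.toolCalls) {
      console.log(`Requested tool: ${toolCall.function.name}(${toolCall.function.arguments})`);
    }
  }
}
```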
```diff
@@ -13,6 +13,7 @@ import { createSseStream } from "@azure/core-sse";

 // Load the .env file if it exists
 import * as dotenv from "dotenv";
+import { IncomingMessage } from "http";
 dotenv.config();

 // You will need to set these environment variables or edit the following values
@@ -21,7 +22,7 @@ const endpoint = process.env["ENDPOINT"] || "<endpoint>";
 export async function main() {
   console.log("== Streaming Chat Completions Sample ==");

-  const client = ModelClient(endpoint, new DefaultAzureCredential()));
+  const client = ModelClient(endpoint, new DefaultAzureCredential());
   const response = await client.path("/chat/completions").post({
     body: {
       messages: [
```
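The parenthesis fix is easiest to see in isolation. A minimal sketch of the corrected client construction and the streaming request it feeds; the prompt is illustrative, and `asNodeStream()` is the REST client's way of getting the raw response stream for the SSE handling shown in the next hunk:

```ts
import ModelClient from "@azure-rest/ai-inference";
import { DefaultAzureCredential } from "@azure/identity";

const endpoint = process.env["ENDPOINT"] ?? "<endpoint>";

export async function main() {
  // One closing parenthesis per call; the old line had an extra `)`.
  const client = ModelClient(endpoint, new DefaultAzureCredential());

  const response = await client
    .path("/chat/completions")
    .post({
      body: {
        messages: [{ role: "user" as const, content: "Give me 5 good reasons to exercise every day." }],
        stream: true,
      },
    })
    .asNodeStream();

  return response;
}
```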
```diff
@@ -44,7 +45,7 @@ export async function main() {
     throw new Error(`Failed to get chat completions: ${streamToString(stream)}`);
   }

-  const sses = createSseStream(stream);
+  const sses = createSseStream(stream as IncomingMessage);

   for await (const event of sses) {
     if (event.data === "[DONE]") {
```
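And a sketch of the consumption side that the `IncomingMessage` cast (together with the new `http` import) enables. `response` is the `asNodeStream()` result from the previous sketch, and the delta-parsing loop follows the general shape of the streaming sample rather than quoting it exactly:

```ts
import { createSseStream } from "@azure/core-sse";
import { IncomingMessage } from "http";

const stream = response.body;
if (!stream) {
  throw new Error("The response stream is undefined");
}
if (response.status !== "200") {
  throw new Error(`Failed to get chat completions: ${response.status}`);
}

// The REST client types the body loosely, while this version of createSseStream
// expects a Node IncomingMessage, hence the cast added by this PR.
const sses = createSseStream(stream as IncomingMessage);

for await (const event of sses) {
  if (event.data === "[DONE]") {
    break;
  }
  for (const choice of JSON.parse(event.data).choices) {
    process.stdout.write(choice.delta?.content ?? "");
  }
}
```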
```diff
@@ -64,6 +64,6 @@ Take a look at our [API Documentation][apiref] for more information about the AP
 [imagefilecompletions]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/ai/ai-inference-rest/samples/v1-beta/javascript/imageFileCompletions.js
 [streamchatcompletions]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/ai/ai-inference-rest/samples/v1-beta/javascript/streamChatCompletions.js
 [toolcall]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/ai/ai-inference-rest/samples/v1-beta/javascript/toolCall.js
-[apiref]: https://docs.microsoft.com/javascript/api/@azure-rest/ai-inference
+[apiref]: https://learn.microsoft.com/javascript/api/@azure-rest/ai-inference
 [freesub]: https://azure.microsoft.com/free/
 [package]: https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/ai/ai-inference-rest/README.md
@@ -76,7 +76,7 @@ Take a look at our [API Documentation][apiref] for more information about the AP
 [imagefilecompletions]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/ai/ai-inference-rest/samples/v1-beta/typescript/src/imageFileCompletions.ts
 [streamchatcompletions]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/ai/ai-inference-rest/samples/v1-beta/typescript/src/streamChatCompletions.ts
 [toolcall]: https://github.com/Azure/azure-sdk-for-js/blob/main/sdk/ai/ai-inference-rest/samples/v1-beta/typescript/src/toolCall.ts
-[apiref]: https://docs.microsoft.com/javascript/api/@azure-rest/ai-inference
+[apiref]: https://learn.microsoft.com/javascript/api/@azure-rest/ai-inference
 [freesub]: https://azure.microsoft.com/free/
 [package]: https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/ai/ai-inference-rest/README.md
 [typescript]: https://www.typescriptlang.org/docs/home.html
```
```diff
@@ -13,6 +13,7 @@ import { createSseStream } from "@azure/core-sse";

 // Load the .env file if it exists
 import * as dotenv from "dotenv";
+import { IncomingMessage } from "http";
 dotenv.config();

 // You will need to set these environment variables or edit the following values
@@ -44,7 +45,7 @@ export async function main() {
     throw new Error(`Failed to get chat completions: ${streamToString(stream)}`);
   }

-  const sses = createSseStream(stream);
+  const sses = createSseStream(stream as IncomingMessage);

   for await (const event of sses) {
     if (event.data === "[DONE]") {
```