A few minor changes
Parent: 1a7bf53cf4
Commit: fb72fa8c35
@@ -29,10 +29,8 @@ Do not remove anything from the result for brevity.
Your job is to convert natural language text to XQuery queries over XML in the BaseX database with the format described below.
The queries work over the collection of documents in the database.

Note: All generated XQueries must be case insensitive. Convert to lower case when needed.

If the prompt contains text in (: ... :), then convert the text into an XQuery query and return the result enclosed in 'Query->' and '<-Query' on single lines.
If not, assume that the query is already an XQuery query, and return the query enclosed in 'ProvidedQuery->' and '<-ProvidedQuery' on single lines.
If not, assume that the prompt is already an XQuery query, and return the query enclosed in 'ProvidedQuery->' and '<-ProvidedQuery' on single lines.

Always provide an explanation enclosed in 'E->' and '<-E' on single lines.
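The conversion rule above turns on whether the prompt contains an XQuery comment block (: ... :). Purely as an illustration of that contract (this helper does not exist in the change set), the same test expressed in C# on the caller's side might be:

// Illustration only: mirrors the decision the system prompt delegates to the model.
// Text containing an XQuery comment (: ... :) is treated as a natural-language request;
// anything else is assumed to already be a complete XQuery query. Requires `using System;`.
private static bool ContainsNaturalLanguageRequest(string prompt)
{
    int open = prompt.IndexOf("(:", StringComparison.Ordinal);
    return open >= 0 && prompt.IndexOf(":)", open + 2, StringComparison.Ordinal) > open;
}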
@@ -86,13 +84,13 @@ declare option output:indent 'yes';
}
</Results>

All queries must be case insensitive.
Note: All queries must be case insensitive.
";

private string systemPrompt = string.Empty;

private ChatHistory history = null;
private AzureOpenAIChatCompletionService chatCompletionService;
private IChatCompletionService chatCompletionService;

public PromptEvaluator(string systemPrompt)
{
@@ -101,8 +99,15 @@ All queries must be case insensitive.

public async Task<string> EvaluatePromptAsync(string query)
{
    if (systemPrompt.Length == 0)
    {
        throw new ArgumentException("The system prompt must be set.");
    }

    if (string.IsNullOrEmpty(query))
    {
        return string.Empty;
    }

    // We need the chatGPT instance and the history
    if (this.history == null)
@@ -113,9 +118,7 @@ All queries must be case insensitive.

    if (chatCompletionService == null)
    {
        this.chatCompletionService = new AzureOpenAIChatCompletionService(
            deploymentName: Secrets.AzureOpenAiModel,
            openAIClient: Secrets.OpenAIClient.Value);
        this.chatCompletionService = Secrets.ChatCompletionService.Value;
    }

    this.history.AddUserMessage(query);
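The hunk stops right after the user message is appended, so the actual call to the model is not visible here. Assuming the extension methods from Microsoft.SemanticKernel.ChatCompletion, a minimal continuation of EvaluatePromptAsync could be sketched as follows (a sketch under those assumptions, not the committed code):

// Possible continuation (not shown in this diff): send the accumulated history to the
// service and record the assistant's reply before returning it.
var reply = await this.chatCompletionService.GetChatMessageContentAsync(this.history);
var text = reply.Content ?? string.Empty;
this.history.AddAssistantMessage(text);
return text;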
@@ -1,6 +1,8 @@
using Azure;
using Azure.AI.OpenAI;
using Azure.Core;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;
using System;
using System.Collections.Generic;
using System.Linq;
@@ -12,15 +14,86 @@ namespace XppReasoningWpf
{
    static class Secrets
    {
        private static string AzureOpenAiApiKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY");
        private static string AzureOpenAiEndpoint = "https://openai-pva.openai.azure.com/";
        /// <summary>
        /// Determines whether the Azure OpenAI API should be used.
        /// </summary>
        public static bool UsesAzureOpenAI => false;

        //public static string AzureOpenAiModel = "chatGPT_GPT35-turbo-0301";
        public static string AzureOpenAiModel = "GPT-4-32K";
        public static string OpenAiModel
        {
            get
            {
                if (UsesAzureOpenAI)
                {
                    return "GPT-4-32K";
                }
                else
                {
                    return "gpt-4-turbo-2024-04-09";
                }
            }
        }

        public static Lazy<OpenAIClient> OpenAIClient => new Lazy<OpenAIClient>(new OpenAIClient(
            new Uri(AzureOpenAiEndpoint),
            new AzureKeyCredential(AzureOpenAiApiKey)));
        public static string ApiKey
        {
            get
            {
                if (UsesAzureOpenAI)
                {
                    return Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY");
                }
                else
                {
                    return Environment.GetEnvironmentVariable("OpenAIBearerToken");
                }
            }
        }

        public static Lazy<IChatCompletionService> ChatCompletionService
        {
            get
            {
                if (UsesAzureOpenAI)
                {
                    return new Lazy<IChatCompletionService>(new AzureOpenAIChatCompletionService(
                        deploymentName: OpenAiModel,
                        openAIClient: OpenAIClient.Value));
                }
                else
                {
                    return new Lazy<IChatCompletionService>(new OpenAIChatCompletionService(
                        modelId: OpenAiModel,
                        apiKey: ApiKey));
                }
            }
        }

        public static Lazy<OpenAIClient> OpenAIClient
        {
            get
            {
                if (UsesAzureOpenAI)
                {
                    string AzureOpenAiApiKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_API_KEY");
                    string AzureOpenAiEndpoint = "https://openai-pva.openai.azure.com/";

                    return new Lazy<OpenAIClient>(new OpenAIClient(
                        new Uri(AzureOpenAiEndpoint),
                        new AzureKeyCredential(AzureOpenAiApiKey)));
                }
                else
                {
                    // private OpenAI account
                    string OpenAiApiKey = Environment.GetEnvironmentVariable("OpenAIBearerToken");
                    string OpenAIEndpoint = "https://api.openai.com/v1/chat/completions";

                    return new Lazy<OpenAIClient>(new OpenAIClient(
                        new Uri(OpenAIEndpoint),
                        new AzureKeyCredential(OpenAiApiKey)));
                }
            }
        }

    }
}
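Both branches of Secrets read their API keys from environment variables (AZURE_OPENAI_API_KEY for Azure, OpenAIBearerToken for the private account), and a missing key only surfaces once one of these members is actually used. A hypothetical fail-fast check at startup, not part of this file, could look like:

// Hypothetical startup check (not in the commit): verify that the key the active branch
// will read is present before any of the Lazy<T>-typed members is dereferenced.
static void ValidateOpenAiConfiguration()
{
    string requiredVariable = Secrets.UsesAzureOpenAI ? "AZURE_OPENAI_API_KEY" : "OpenAIBearerToken";

    if (string.IsNullOrEmpty(Environment.GetEnvironmentVariable(requiredVariable)))
    {
        throw new InvalidOperationException($"Environment variable '{requiredVariable}' is not set.");
    }
}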
@@ -346,7 +346,7 @@ namespace XppReasoningWpf.ViewModels
{
    basexQuery = ExtractBetweenStrings(resultingQuery, "ProvidedQuery->", "<-ProvidedQuery");
}
this.Log = $"Query: {query}\nbasexQuery: {basexQuery}Explanation: {explanation}\n\n";
this.Log = $"Query: {query}\n\nbasexQuery: {basexQuery}\n\nExplanation: {explanation}\n\n";

result = await session.DoQueryAsync(basexQuery,
    new Tuple<string, string>("database", model.SelectedDatabase.Name),
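ExtractBetweenStrings is used here to pull the query out of the model's reply, but its implementation lies outside this diff. A plausible shape for such a helper, for illustration only, is:

// Illustration only; the committed ExtractBetweenStrings is not shown in this diff.
// Returns the text between the first occurrence of the two markers, or an empty string
// when either marker is missing.
private static string ExtractBetweenStrings(string text, string startMarker, string endMarker)
{
    int start = text.IndexOf(startMarker, StringComparison.Ordinal);
    if (start < 0)
    {
        return string.Empty;
    }
    start += startMarker.Length;

    int end = text.IndexOf(endMarker, start, StringComparison.Ordinal);
    if (end < 0)
    {
        return string.Empty;
    }

    return text.Substring(start, end - start).Trim();
}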
@@ -999,8 +999,10 @@ namespace XppReasoningWpf.ViewModels
try
{
    selectedEditor.Cursor = Cursors.Wait;
    var prompt = (string)p;
    result = await this.AIPromptEvaluator.EvaluatePromptAsync(sourceCode + Environment.NewLine + prompt);
    var userQuery = (string)p;
    var prompt = sourceCode + Environment.NewLine + userQuery;
    // var prompt = userQuery + Environment.NewLine + sourceCode;
    result = await this.AIPromptEvaluator.EvaluatePromptAsync(prompt);
}
finally
{