|
8 | 8 | using Azure.Identity; |
9 | 9 | #endif |
10 | 10 | using Microsoft.Agents.AI; |
| 11 | +using Microsoft.Agents.AI.DevUI; |
11 | 12 | using Microsoft.Agents.AI.Hosting; |
12 | 13 | using Microsoft.Agents.AI.Workflows; |
13 | 14 | using Microsoft.Extensions.AI; |
14 | 15 | #if (IsOllama) |
15 | 16 | using OllamaSharp; |
16 | | -#elif (IsGHModels || IsOpenAI || IsAzureOpenAI) |
| 17 | +#endif |
| 18 | +#if (IsGHModels || IsAzureOpenAI) |
17 | 19 | using OpenAI; |
18 | 20 | #endif |
| 21 | +#if (IsGHModels || IsOpenAI || IsAzureOpenAI) |
| 22 | +using OpenAI.Chat; |
| 23 | +#endif |
19 | 24 |
|
20 | 25 | var builder = WebApplication.CreateBuilder(args); |
21 | 26 |
|
22 | 27 | #if (IsGHModels) |
23 | 28 | // You will need to set the token to your own value |
24 | 29 | // You can do this using Visual Studio's "Manage User Secrets" UI, or on the command line: |
25 | 30 | // cd this-project-directory |
26 | | -// dotnet user-secrets set GitHubModels:Token YOUR-GITHUB-TOKEN |
27 | | -var credential = new ApiKeyCredential(builder.Configuration["GitHubModels:Token"] ?? throw new InvalidOperationException("Missing configuration: GitHubModels:Token. See README for details.")); |
28 | | -var openAIOptions = new OpenAIClientOptions { Endpoint = new Uri("https://models.inference.ai.azure.com") }; |
29 | | - |
30 | | -var chatClient = new OpenAIClient(credential, openAIOptions) |
31 | | - .GetChatClient("gpt-4o-mini").AsIChatClient(); |
32 | | -#elif (IsOllama) |
33 | | -// You will need to have Ollama running locally with the llama3.2 model installed |
34 | | -// Visit https://ollama.com for installation instructions |
35 | | -var chatClient = new OllamaApiClient(new Uri("http://localhost:11434"), "llama3.2"); |
| 31 | +// dotnet user-secrets set "GITHUB_TOKEN" "your-github-models-token-here" |
| 32 | +var chatClient = new ChatClient( |
| 33 | + "gpt-4o-mini", |
| 34 | + new ApiKeyCredential(builder.Configuration["GITHUB_TOKEN"] ?? throw new InvalidOperationException("Missing configuration: GITHUB_TOKEN")), |
| 35 | + new OpenAIClientOptions { Endpoint = new Uri("https://models.inference.ai.azure.com") }) |
| 36 | + .AsIChatClient(); |
36 | 37 | #elif (IsOpenAI) |
37 | 38 | // You will need to set the API key to your own value |
38 | 39 | // You can do this using Visual Studio's "Manage User Secrets" UI, or on the command line: |
39 | 40 | // cd this-project-directory |
40 | | -// dotnet user-secrets set OpenAI:Key YOUR-API-KEY |
41 | | -var openAIClient = new OpenAIClient( |
42 | | - new ApiKeyCredential(builder.Configuration["OpenAI:Key"] ?? throw new InvalidOperationException("Missing configuration: OpenAI:Key. See README for details."))); |
43 | | - |
44 | | -#pragma warning disable OPENAI001 // GetOpenAIResponseClient(string) is experimental and subject to change or removal in future updates. |
45 | | -var chatClient = openAIClient.GetOpenAIResponseClient("gpt-4o-mini").AsIChatClient(); |
46 | | -#pragma warning restore OPENAI001 |
| 41 | +// dotnet user-secrets set "OPENAI_KEY" "your-openai-api-key-here" |
| 42 | +var chatClient = new ChatClient( |
| 43 | + "gpt-4o-mini", |
| 44 | + new ApiKeyCredential(builder.Configuration["OPENAI_KEY"] ?? throw new InvalidOperationException("Missing configuration: OPENAI_KEY"))) |
| 45 | + .AsIChatClient(); |
47 | 46 | #elif (IsAzureOpenAI) |
48 | 47 | // You will need to set the endpoint to your own value |
49 | 48 | // You can do this using Visual Studio's "Manage User Secrets" UI, or on the command line: |
|
52 | 51 | #if (!IsManagedIdentity) |
53 | 52 | // dotnet user-secrets set AzureOpenAI:Key YOUR-API-KEY |
54 | 53 | #endif |
55 | | -var azureOpenAIEndpoint = new Uri(new Uri(builder.Configuration["AzureOpenAI:Endpoint"] ?? throw new InvalidOperationException("Missing configuration: AzureOpenAI:Endpoint. See README for details.")), "/openai/v1"); |
56 | | -#if (IsManagedIdentity) |
57 | | -#pragma warning disable OPENAI001 // OpenAIClient(AuthenticationPolicy, OpenAIClientOptions) and GetOpenAIResponseClient(string) are experimental and subject to change or removal in future updates. |
58 | | -var azureOpenAI = new OpenAIClient( |
59 | | - new BearerTokenPolicy(new DefaultAzureCredential(), "https://ai.azure.com/.default"), |
60 | | - new OpenAIClientOptions { Endpoint = azureOpenAIEndpoint }); |
61 | | - |
62 | | -#elif (!IsManagedIdentity) |
63 | | -var openAIOptions = new OpenAIClientOptions { Endpoint = azureOpenAIEndpoint }; |
64 | | -var azureOpenAI = new OpenAIClient(new ApiKeyCredential(builder.Configuration["AzureOpenAI:Key"] ?? throw new InvalidOperationException("Missing configuration: AzureOpenAI:Key. See README for details.")), openAIOptions); |
| 54 | +var azureOpenAIEndpoint = new Uri(new Uri(builder.Configuration["AzureOpenAI:Endpoint"] ?? throw new InvalidOperationException("Missing configuration: AzureOpenAI:Endpoint")), "/openai/v1"); |
65 | 55 |
|
66 | | -#pragma warning disable OPENAI001 // GetOpenAIResponseClient(string) is experimental and subject to change or removal in future updates. |
67 | | -#endif |
68 | | -var chatClient = azureOpenAI.GetOpenAIResponseClient("gpt-4o-mini").AsIChatClient(); |
| 56 | +#if (IsManagedIdentity) |
| 57 | +#pragma warning disable OPENAI001 // The overload accepting an AuthenticationPolicy is experimental and may change or be removed in future releases. |
| 58 | +var chatClient = new ChatClient( |
| 59 | + "gpt-4o-mini", |
| 60 | + new BearerTokenPolicy(new DefaultAzureCredential(), "https://ai.azure.com/.default"), |
| 61 | + new OpenAIClientOptions { Endpoint = azureOpenAIEndpoint }) |
| 62 | + .AsIChatClient(); |
69 | 63 | #pragma warning restore OPENAI001 |
| 64 | +#else |
| 65 | +var chatClient = new ChatClient( |
| 66 | + "gpt-4o-mini", |
| 67 | + new ApiKeyCredential(builder.Configuration["AzureOpenAI:Key"] ?? throw new InvalidOperationException("Missing configuration: AzureOpenAI:Key")), |
| 68 | + new OpenAIClientOptions { Endpoint = azureOpenAIEndpoint }) |
| 69 | + .AsIChatClient(); |
| 70 | +#endif |
| 71 | +#elif (IsOllama) |
| 72 | +// You will need to have Ollama running locally with the llama3.2 model installed |
| 73 | +// Visit https://ollama.com for installation instructions |
| 74 | +var chatClient = new OllamaApiClient(new Uri("http://localhost:11434"), "llama3.2"); |
70 | 75 | #endif |
71 | 76 |
|
72 | 77 | builder.Services.AddChatClient(chatClient); |
|
76 | 81 | builder.AddAIAgent("editor", (sp, key) => new ChatClientAgent( |
77 | 82 | chatClient, |
78 | 83 | name: key, |
79 | | - instructions: "You edit short stories to improve grammar and style. You ensure the stories are less than 300 words.", |
80 | | - tools: [ AIFunctionFactory.Create(FormatStory) ] |
| 84 | + instructions: "You edit short stories to improve grammar and style, ensuring the stories are less than 300 words. Once finished editing, you select a title and format the story for publishing.", |
| 85 | + tools: [AIFunctionFactory.Create(FormatStory)] |
81 | 86 | )); |
82 | 87 |
|
83 | 88 | builder.AddWorkflow("publisher", (sp, key) => AgentWorkflowBuilder.BuildSequential( |
|
86 | 91 | sp.GetRequiredKeyedService<AIAgent>("editor") |
87 | 92 | )).AddAsAIAgent(); |
88 | 93 |
|
89 | | -var app = builder.Build(); |
| 94 | +// Register services for OpenAI responses and conversations (also required for DevUI) |
| 95 | +builder.Services.AddOpenAIResponses(); |
| 96 | +builder.Services.AddOpenAIConversations(); |
90 | 97 |
|
| 98 | +var app = builder.Build(); |
91 | 99 | app.UseHttpsRedirection(); |
| 100 | + |
| 101 | +// Map endpoints for OpenAI responses and conversations (also required for DevUI) |
92 | 102 | app.MapOpenAIResponses(); |
| 103 | +app.MapOpenAIConversations(); |
| 104 | + |
| 105 | +if (builder.Environment.IsDevelopment()) |
| 106 | +{ |
| 107 | + // Map DevUI endpoint to /devui |
| 108 | + app.MapDevUI(); |
| 109 | +} |
93 | 110 |
|
94 | 111 | app.Run(); |
95 | 112 |
|
// Tool exposed to the "editor" agent (via AIFunctionFactory.Create) for final publication formatting.
[Description("Formats the story for publication, revealing its title.")]
string FormatStory(string title, string story)
{
    // Markdown layout: bolded title line, blank separator, then the story body.
    // The raw string literal strips the common leading indentation, so the
    // emitted text starts at column zero.
    return $"""
        **Title**: {title}

        {story}
        """;
}
0 commit comments