Skip to content

Commit 073516f

Browse files
authored
Update to M.E.AI 9.3.0-preview.1.25161.3 (#7414)
1 parent c36975c commit 073516f

File tree

7 files changed

+20
-19
lines changed

7 files changed

+20
-19
lines changed

eng/Versions.props

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,7 @@
4646
<MicrosoftDotNetInteractiveVersion>1.0.0-beta.24375.2</MicrosoftDotNetInteractiveVersion>
4747
<MicrosoftMLOnnxRuntimeVersion>1.18.1</MicrosoftMLOnnxRuntimeVersion>
4848
<MlNetMklDepsVersion>0.0.0.12</MlNetMklDepsVersion>
49-
<MicrosoftExtensionsAIVersion>9.3.0-preview.1.25114.11</MicrosoftExtensionsAIVersion>
49+
<MicrosoftExtensionsAIVersion>9.3.0-preview.1.25161.3</MicrosoftExtensionsAIVersion>
5050
<!-- runtime.native.System.Data.SqlClient.sni is not updated by dependency flow as it is not produced live anymore. -->
5151
<RuntimeNativeSystemDataSqlClientSniVersion>4.4.0</RuntimeNativeSystemDataSqlClientSniVersion>
5252
<!--

src/Microsoft.ML.GenAI.Core/CausalLMPipelineChatClient.cs

Lines changed: 8 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -33,9 +33,9 @@ public CausalLMPipelineChatClient(
3333
_pipeline = pipeline;
3434
}
3535

36-
public virtual Task<ChatResponse> GetResponseAsync(IList<ChatMessage> chatMessages, ChatOptions? options = null, CancellationToken cancellationToken = default)
36+
public virtual Task<ChatResponse> GetResponseAsync(IEnumerable<ChatMessage> messages, ChatOptions? options = null, CancellationToken cancellationToken = default)
3737
{
38-
var prompt = _chatTemplateBuilder.BuildPrompt(chatMessages, options);
38+
var prompt = _chatTemplateBuilder.BuildPrompt(messages, options);
3939
var stopSequences = options?.StopSequences ?? Array.Empty<string>();
4040

4141
var output = _pipeline.Generate(
@@ -49,30 +49,31 @@ public virtual Task<ChatResponse> GetResponseAsync(IList<ChatMessage> chatMessag
4949
{
5050
CreatedAt = DateTime.UtcNow,
5151
FinishReason = ChatFinishReason.Stop,
52+
ResponseId = Guid.NewGuid().ToString("N"),
5253
});
5354
}
5455

5556
#pragma warning disable CS1998 // Async method lacks 'await' operators and will run synchronously
5657
public virtual async IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(
5758
#pragma warning restore CS1998 // Async method lacks 'await' operators and will run synchronously
58-
IList<ChatMessage> chatMessages,
59+
IEnumerable<ChatMessage> messages,
5960
ChatOptions? options = null,
6061
[EnumeratorCancellation] CancellationToken cancellationToken = default)
6162
{
62-
var prompt = _chatTemplateBuilder.BuildPrompt(chatMessages, options);
63+
var prompt = _chatTemplateBuilder.BuildPrompt(messages, options);
6364
var stopSequences = options?.StopSequences ?? Array.Empty<string>();
6465

66+
string responseId = Guid.NewGuid().ToString("N");
6567
foreach (var output in _pipeline.GenerateStreaming(
6668
prompt,
6769
maxLen: options?.MaxOutputTokens ?? 1024,
6870
temperature: options?.Temperature ?? 0.7f,
6971
stopSequences: stopSequences.ToArray()))
7072
{
71-
yield return new ChatResponseUpdate
73+
yield return new(ChatRole.Assistant, output)
7274
{
73-
Role = ChatRole.Assistant,
74-
Text = output,
7575
CreatedAt = DateTime.UtcNow,
76+
ResponseId = responseId,
7677
};
7778
}
7879
}

src/Microsoft.ML.GenAI.Core/Utility/IChatTemplateBuilder.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ public interface IMEAIChatTemplateBuilder
3232
/// <param name="options"></param>
3333
/// <param name="appendAssistantTag">true if append assistant tag at the end of prompt.</param>
3434
/// <returns></returns>
35-
string BuildPrompt(IList<ChatMessage> messages, ChatOptions? options = null, bool appendAssistantTag = true);
35+
string BuildPrompt(IEnumerable<ChatMessage> messages, ChatOptions? options = null, bool appendAssistantTag = true);
3636
}
3737

3838
public interface IChatTemplateBuilder : IAutoGenChatTemplateBuilder, ISemanticKernelChatTemplateBuilder

src/Microsoft.ML.GenAI.LLaMA/Llama3CausalLMChatClient.cs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -25,7 +25,7 @@ public Llama3CausalLMChatClient(
2525
}
2626

2727
public override Task<ChatResponse> GetResponseAsync(
28-
IList<ChatMessage> chatMessages,
28+
IEnumerable<ChatMessage> messages,
2929
ChatOptions? options = null,
3030
CancellationToken cancellationToken = default)
3131
{
@@ -40,18 +40,18 @@ public override Task<ChatResponse> GetResponseAsync(
4040
options.StopSequences = new List<string> { _eotToken };
4141
}
4242

43-
return base.GetResponseAsync(chatMessages, options, cancellationToken);
43+
return base.GetResponseAsync(messages, options, cancellationToken);
4444
}
4545

4646
public override IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(
47-
IList<ChatMessage> chatMessages,
47+
IEnumerable<ChatMessage> messages,
4848
ChatOptions? options = null,
4949
CancellationToken cancellationToken = default)
5050
{
5151
options ??= new ChatOptions();
5252
options.StopSequences ??= [];
5353
options.StopSequences.Add(_eotToken);
5454

55-
return base.GetStreamingResponseAsync(chatMessages, options, cancellationToken);
55+
return base.GetStreamingResponseAsync(messages, options, cancellationToken);
5656
}
5757
}

src/Microsoft.ML.GenAI.LLaMA/Llama3_1ChatTemplateBuilder.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -88,7 +88,7 @@ public string BuildPrompt(ChatHistory chatHistory)
8888
return sb.ToString();
8989
}
9090

91-
public string BuildPrompt(IList<ChatMessage> messages, ChatOptions? options = null, bool appendAssistantTag = true)
91+
public string BuildPrompt(IEnumerable<ChatMessage> messages, ChatOptions? options = null, bool appendAssistantTag = true)
9292
{
9393
var availableRoles = new[] { ChatRole.System, ChatRole.User, ChatRole.Assistant };
9494
if (messages.Any(m => m.Text is null))

src/Microsoft.ML.GenAI.Phi/Phi3/Phi3CausalLMChatClient.cs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -30,7 +30,7 @@ public Phi3CausalLMChatClient(
3030
}
3131

3232
public override Task<ChatResponse> GetResponseAsync(
33-
IList<ChatMessage> chatMessages,
33+
IEnumerable<ChatMessage> messages,
3434
ChatOptions? options = null,
3535
CancellationToken cancellationToken = default)
3636
{
@@ -45,18 +45,18 @@ public override Task<ChatResponse> GetResponseAsync(
4545
options.StopSequences = [_eotToken];
4646
}
4747

48-
return base.GetResponseAsync(chatMessages, options, cancellationToken);
48+
return base.GetResponseAsync(messages, options, cancellationToken);
4949
}
5050

5151
public override IAsyncEnumerable<ChatResponseUpdate> GetStreamingResponseAsync(
52-
IList<ChatMessage> chatMessages,
52+
IEnumerable<ChatMessage> messages,
5353
ChatOptions? options = null,
5454
CancellationToken cancellationToken = default)
5555
{
5656
options ??= new ChatOptions();
5757
options.StopSequences ??= [];
5858
options.StopSequences.Add(_eotToken);
5959

60-
return base.GetStreamingResponseAsync(chatMessages, options, cancellationToken);
60+
return base.GetStreamingResponseAsync(messages, options, cancellationToken);
6161
}
6262
}

src/Microsoft.ML.GenAI.Phi/Phi3/Phi3ChatTemplateBuilder.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -89,7 +89,7 @@ public string BuildPrompt(ChatHistory chatHistory)
8989
return sb.ToString();
9090
}
9191

92-
public string BuildPrompt(IList<ChatMessage> messages, ChatOptions? options = null, bool appendAssistantTag = true)
92+
public string BuildPrompt(IEnumerable<ChatMessage> messages, ChatOptions? options = null, bool appendAssistantTag = true)
9393
{
9494
var availableRoles = new[] { ChatRole.System, ChatRole.User, ChatRole.Assistant };
9595
if (messages.Any(m => m.Text is null))

0 commit comments

Comments (0)