Skip to content

Some docs fixes #5861

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 3 commits into from
Feb 10, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -21,15 +21,15 @@ public ChatMessage()
}

/// <summary>Initializes a new instance of the <see cref="ChatMessage"/> class.</summary>
/// <param name="role">Role of the author of the message.</param>
/// <param name="content">Content of the message.</param>
/// <param name="role">The role of the author of the message.</param>
/// <param name="content">The contents of the message.</param>
public ChatMessage(ChatRole role, string? content)
: this(role, content is null ? [] : [new TextContent(content)])
{
}

/// <summary>Initializes a new instance of the <see cref="ChatMessage"/> class.</summary>
/// <param name="role">Role of the author of the message.</param>
/// <param name="role">The role of the author of the message.</param>
/// <param name="contents">The contents of the message.</param>
public ChatMessage(
ChatRole role,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,21 +10,46 @@ namespace Microsoft.Extensions.AI;
public class ChatOptions
{
/// <summary>Gets or sets the temperature for generating chat responses.</summary>
/// <remarks>
/// This value controls the randomness of predictions made by the model. Use a lower value to decrease randomness in the response.
/// </remarks>
public float? Temperature { get; set; }

/// <summary>Gets or sets the maximum number of tokens in the generated chat response.</summary>
public int? MaxOutputTokens { get; set; }

/// <summary>Gets or sets the "nucleus sampling" factor (or "top p") for generating chat responses.</summary>
/// <remarks>
/// Nucleus sampling is an alternative to sampling with temperature where the model
/// considers the results of the tokens with <see cref="TopP"/> probability mass.
/// For example, 0.1 means only the tokens comprising the top 10% probability mass are considered.
/// </remarks>
public float? TopP { get; set; }

/// <summary>Gets or sets a count indicating how many of the most probable tokens the model should consider when generating the next part of the text.</summary>
/// <summary>
/// Gets or sets the number of most probable tokens that the model considers when generating the next part of the text.
/// </summary>
/// <remarks>
/// This property reduces the probability of generating nonsense. A higher value gives more diverse answers, while a lower value is more conservative.
/// </remarks>
public int? TopK { get; set; }

/// <summary>Gets or sets the frequency penalty for generating chat responses.</summary>
/// <summary>
/// Gets or sets the penalty for repeated tokens in chat responses proportional to how many times they've appeared.
/// </summary>
/// <remarks>
/// You can modify this value to reduce the repetitiveness of generated tokens. The higher the value, the stronger a penalty
/// is applied to previously present tokens, proportional to how many times they've already appeared in the prompt or prior generation.
/// </remarks>
public float? FrequencyPenalty { get; set; }

/// <summary>Gets or sets the presence penalty for generating chat responses.</summary>
/// <summary>
/// Gets or sets a value that influences the probability of generated tokens appearing based on their existing presence in generated text.
/// </summary>
/// <remarks>
/// You can modify this value to reduce repetitiveness of generated tokens. Similar to <see cref="FrequencyPenalty"/>,
/// except that this penalty is applied equally to all tokens that have already appeared, regardless of their exact frequencies.
/// </remarks>
public float? PresencePenalty { get; set; }

/// <summary>Gets or sets a seed value used by a service to control the reproducibility of results.</summary>
Expand All @@ -47,7 +72,12 @@ public class ChatOptions
/// <summary>Gets or sets the model ID for the chat request.</summary>
public string? ModelId { get; set; }

/// <summary>Gets or sets the stop sequences for generating chat responses.</summary>
/// <summary>
/// Gets or sets the list of stop sequences.
/// </summary>
/// <remarks>
/// After a stop sequence is detected, the model stops generating further tokens for chat responses.
/// </remarks>
public IList<string>? StopSequences { get; set; }

/// <summary>Gets or sets the tool mode for the chat request.</summary>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@

namespace Microsoft.Extensions.AI;

/// <summary>A delegating chat client that wraps an inner client with implementations provided by delegates.</summary>
/// <summary>Represents a delegating chat client that wraps an inner client with implementations provided by delegates.</summary>
public sealed class AnonymousDelegatingChatClient : DelegatingChatClient
{
/// <summary>The delegate to use as the implementation of <see cref="CompleteAsync"/>.</summary>
Expand All @@ -40,7 +40,7 @@ public sealed class AnonymousDelegatingChatClient : DelegatingChatClient
/// used to perform the operation on the inner client. It will handle both the non-streaming and streaming cases.
/// </param>
/// <remarks>
/// This overload may be used when the anonymous implementation needs to provide pre- and/or post-processing, but doesn't
/// This overload may be used when the anonymous implementation needs to provide pre-processing and/or post-processing, but doesn't
/// need to interact with the results of the operation, which will come from the inner client.
/// </remarks>
/// <exception cref="ArgumentNullException"><paramref name="innerClient"/> is <see langword="null"/>.</exception>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
namespace Microsoft.Extensions.AI;

/// <summary>
/// A delegating chat client that caches the results of chat calls.
/// Represents a delegating chat client that caches the results of chat calls.
/// </summary>
public abstract class CachingChatClient : DelegatingChatClient
{
Expand All @@ -30,18 +30,18 @@ protected CachingChatClient(IChatClient innerClient)
{
}

/// <summary>Gets or sets a value indicating whether to coalesce streaming updates.</summary>
/// <remarks>
/// <summary>Gets or sets a value indicating whether streaming updates are coalesced.</summary>
/// <value>
/// <para>
/// When <see langword="true"/>, the client will attempt to coalesce contiguous streaming updates
/// into a single update, in order to reduce the number of individual items that are yielded on
/// subsequent enumerations of the cached data. When <see langword="false"/>, the updates are
/// <see langword="true"/> if the client attempts to coalesce contiguous streaming updates
/// into a single update, to reduce the number of individual items that are yielded on
/// subsequent enumerations of the cached data; <see langword="false"/> if the updates are
/// kept unaltered.
/// </para>
/// <para>
/// The default is <see langword="true"/>.
/// </para>
/// </remarks>
/// </value>
public bool CoalesceStreamingUpdates { get; set; } = true;

/// <inheritdoc />
Expand Down