3 changes: 3 additions & 0 deletions dotnet/agent-framework-dotnet.slnx
@@ -313,6 +313,9 @@
<Folder Name="/Samples/04-hosting/FoundryHostedAgents/responses/Hosted-McpTools/">
<Project Path="samples/04-hosting/FoundryHostedAgents/responses/Hosted-McpTools/HostedMcpTools.csproj" />
</Folder>
<Folder Name="/Samples/04-hosting/FoundryHostedAgents/responses/Hosted-Observability/">
<Project Path="samples/04-hosting/FoundryHostedAgents/responses/Hosted-Observability/HostedObservability.csproj" />
</Folder>
<Folder Name="/Samples/04-hosting/FoundryHostedAgents/responses/Hosted-Toolbox/">
<Project Path="samples/04-hosting/FoundryHostedAgents/responses/Hosted-Toolbox/HostedToolbox.csproj" />
</Folder>
12 changes: 12 additions & 0 deletions dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Observability/.env.example
@@ -0,0 +1,12 @@
AZURE_AI_PROJECT_ENDPOINT=<your-azure-ai-project-endpoint>
ASPNETCORE_URLS=http://+:8088
ASPNETCORE_ENVIRONMENT=Development
AZURE_AI_MODEL_DEPLOYMENT_NAME=gpt-4o
AZURE_BEARER_TOKEN=DefaultAzureCredential

# Capture prompt / completion / tool argument content on GenAI spans.
OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT=true

# Uncomment and set to send local-run telemetry to Application Insights.
# When the agent runs inside Foundry this value is injected automatically.
#APPLICATIONINSIGHTS_CONNECTION_STRING=<your-app-insights-connection-string>
17 changes: 17 additions & 0 deletions dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Observability/Dockerfile
@@ -0,0 +1,17 @@
# Use the official .NET 10.0 ASP.NET runtime as a parent image
FROM mcr.microsoft.com/dotnet/aspnet:10.0 AS base
WORKDIR /app

FROM mcr.microsoft.com/dotnet/sdk:10.0 AS build
WORKDIR /src
COPY . .
RUN dotnet restore
RUN dotnet publish -c Release -o /app/publish

# Final stage
FROM base AS final
WORKDIR /app
COPY --from=build /app/publish .
EXPOSE 8088
ENV ASPNETCORE_URLS=http://+:8088
ENTRYPOINT ["dotnet", "HostedObservability.dll"]
19 changes: 19 additions & 0 deletions dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Observability/Dockerfile.contributor
@@ -0,0 +1,19 @@
# Dockerfile for contributors building from the agent-framework repository source.
#
# This project uses ProjectReference to the local Microsoft.Agents.AI.Foundry source,
# which means a standard multi-stage Docker build cannot resolve dependencies outside
# this folder. Instead, pre-publish the app targeting the container runtime and copy
# the output into the container:
#
# dotnet publish -c Debug -f net10.0 -r linux-musl-x64 --self-contained false -o out
# docker build -f Dockerfile.contributor -t hosted-observability .
# docker run --rm -p 8088:8088 -e AGENT_NAME=hosted-observability -e AZURE_BEARER_TOKEN=$AZURE_BEARER_TOKEN --env-file .env hosted-observability
#
# For end-users consuming the NuGet package (not ProjectReference), use the standard
# Dockerfile which performs a full dotnet restore + publish inside the container.
FROM mcr.microsoft.com/dotnet/aspnet:10.0-alpine AS final
WORKDIR /app
COPY out/ .
EXPOSE 8088
ENV ASPNETCORE_URLS=http://+:8088
ENTRYPOINT ["dotnet", "HostedObservability.dll"]
32 changes: 32 additions & 0 deletions dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Observability/HostedObservability.csproj
@@ -0,0 +1,32 @@
<Project Sdk="Microsoft.NET.Sdk.Web">

<PropertyGroup>
<TargetFrameworks>net10.0</TargetFrameworks>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<CentralPackageTransitivePinningEnabled>false</CentralPackageTransitivePinningEnabled>
<RootNamespace>HostedObservability</RootNamespace>
<AssemblyName>HostedObservability</AssemblyName>
<NoWarn>$(NoWarn);</NoWarn>
</PropertyGroup>

<ItemGroup>
<PackageReference Include="Azure.AI.Projects" VersionOverride="2.1.0-beta.1" />
<PackageReference Include="Azure.Identity" />
<PackageReference Include="DotNetEnv" />
</ItemGroup>

<!-- For contributors: uses ProjectReference to build against local source -->
<ItemGroup>
<ProjectReference Include="..\..\..\..\..\src\Microsoft.Agents.AI.Foundry\Microsoft.Agents.AI.Foundry.csproj" />
<ProjectReference Include="..\..\..\..\..\src\Microsoft.Agents.AI.Foundry.Hosting\Microsoft.Agents.AI.Foundry.Hosting.csproj" />
</ItemGroup>

<!-- For end-users: uncomment the PackageReference below and remove the ProjectReference above
<ItemGroup>
<PackageReference Include="Microsoft.Agents.AI.Foundry" Version="1.0.0" />
<PackageReference Include="Microsoft.Agents.AI.Foundry.Hosting" Version="1.0.0" />
</ItemGroup>
-->

</Project>
109 changes: 109 additions & 0 deletions dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Observability/Program.cs
@@ -0,0 +1,109 @@
// Copyright (c) Microsoft. All rights reserved.

// Hosted Observability Agent - demonstrates that the Foundry hosting pipeline
// emits OpenTelemetry traces, metrics and logs with no extra wiring required.
// Two small tools are included so a request produces a span tree covering
// agent invocation, the chat call, and tool execution.

using System.ComponentModel;
using Azure.AI.Projects;
using Azure.Core;
using Azure.Identity;
using DotNetEnv;
using Microsoft.Agents.AI;
using Microsoft.Agents.AI.Foundry.Hosting;
using Microsoft.Extensions.AI;

// Load .env file if present (for local development)
Env.TraversePath().Load();

string endpoint = Environment.GetEnvironmentVariable("AZURE_AI_PROJECT_ENDPOINT")
?? throw new InvalidOperationException("AZURE_AI_PROJECT_ENDPOINT is not set.");
string deploymentName = Environment.GetEnvironmentVariable("AZURE_AI_MODEL_DEPLOYMENT_NAME") ?? "gpt-4o";

// Use a chained credential: try a temporary dev token first (for local Docker debugging),
// then fall back to DefaultAzureCredential (for local dev via dotnet run / managed identity in production).
TokenCredential credential = new ChainedTokenCredential(
new DevTemporaryTokenCredential(),
new DefaultAzureCredential());

// ── Tools ────────────────────────────────────────────────────────────────────

string[] locations = ["New York", "London", "Paris", "Tokyo"];
string[] conditions = ["sunny", "cloudy", "rainy", "stormy"];
Random rng = new();

[Description("Get the current location of the user.")]
string GetCurrentLocation() => locations[rng.Next(locations.Length)];

[Description("Get the weather for a given location.")]
string GetWeather(
[Description("The location to get the weather for.")] string location)
=> $"The weather in {location} is {conditions[rng.Next(conditions.Length)]} with a high of {rng.Next(10, 31)}°C.";

// ── Create and host the agent ────────────────────────────────────────────────
//
// AddFoundryResponses automatically wraps `agent` with OpenTelemetryAgent
// (see Microsoft.Agents.AI.Foundry.Hosting.ServiceCollectionExtensions.ApplyOpenTelemetry)
// and the OTLP exporter is registered by Azure.AI.AgentServer.Core's
// AddAgentHostTelemetry(). No additional observability wiring is required.

AIAgent agent = new AIProjectClient(new Uri(endpoint), credential)
.AsAIAgent(
model: deploymentName,
instructions: "You are a friendly assistant. Keep your answers brief.",
name: Environment.GetEnvironmentVariable("AGENT_NAME") ?? "hosted-observability",
description: "A hosted agent that demonstrates Foundry observability.",
tools: [
AIFunctionFactory.Create(GetCurrentLocation),
AIFunctionFactory.Create(GetWeather),
]);

var builder = WebApplication.CreateBuilder(args);
builder.Services.AddFoundryResponses(agent);

var app = builder.Build();
app.MapFoundryResponses();

if (app.Environment.IsDevelopment())
{
app.MapFoundryResponses("openai/v1");
}

app.Run();

/// <summary>
/// A <see cref="TokenCredential"/> for local Docker debugging only.
/// Reads a pre-fetched bearer token from the <c>AZURE_BEARER_TOKEN</c> environment variable
/// once at startup. This should NOT be used in production.
///
/// Generate a token on your host and pass it to the container:
/// export AZURE_BEARER_TOKEN=$(az account get-access-token --resource https://ai.azure.com --query accessToken -o tsv)
/// docker run -e AZURE_BEARER_TOKEN=$AZURE_BEARER_TOKEN ...
/// </summary>
internal sealed class DevTemporaryTokenCredential : TokenCredential
{
private const string EnvironmentVariable = "AZURE_BEARER_TOKEN";
private readonly string? _token;

public DevTemporaryTokenCredential()
{
this._token = Environment.GetEnvironmentVariable(EnvironmentVariable);
}

public override AccessToken GetToken(TokenRequestContext requestContext, CancellationToken cancellationToken)
=> this.GetAccessToken();

public override ValueTask<AccessToken> GetTokenAsync(TokenRequestContext requestContext, CancellationToken cancellationToken)
=> new(this.GetAccessToken());

private AccessToken GetAccessToken()
{
if (string.IsNullOrEmpty(this._token) || this._token == "DefaultAzureCredential")
{
throw new CredentialUnavailableException($"{EnvironmentVariable} environment variable is not set.");
}

return new AccessToken(this._token, DateTimeOffset.UtcNow.AddHours(1));
}
}
109 changes: 109 additions & 0 deletions dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Observability/README.md
@@ -0,0 +1,109 @@
# Hosted-Observability

A hosted [Agent Framework](https://github.com/microsoft/agent-framework) agent that demonstrates how the Foundry hosting pipeline emits OpenTelemetry traces, metrics and logs with no extra wiring.

The agent has two small tools, `GetCurrentLocation` and `GetWeather`, so an end-to-end run produces a span tree covering agent invocation, the underlying chat call, and tool execution.

## How it works

### Instrumentation is on by default

Unlike the Python SDK, the .NET hosting library is instrumented by default. `AddFoundryResponses(agent)` automatically wraps the agent with `OpenTelemetryAgent` (see `Microsoft.Agents.AI.Foundry.Hosting.ServiceCollectionExtensions.ApplyOpenTelemetry`) and the OTLP exporter pipeline is registered by `Azure.AI.AgentServer.Core`'s `AddAgentHostTelemetry()`. There is no `ENABLE_INSTRUMENTATION` flag to set.
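
For reference, the entire hosting wiring is the trimmed sketch below (taken from this sample's program); `agent` is the `AIAgent` built earlier with `AsAIAgent`, and there is no explicit OpenTelemetry setup anywhere in the file:

```csharp
// Trimmed sketch of this sample's hosting code: AddFoundryResponses wraps `agent`
// with OpenTelemetryAgent, and the AgentServer host registers the OTLP exporter.
var builder = WebApplication.CreateBuilder(args);
builder.Services.AddFoundryResponses(agent);

var app = builder.Build();
app.MapFoundryResponses();
app.Run();
```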

### Sensitive content

Prompt, completion, and tool-argument content is omitted from spans by default. Set the standard OpenTelemetry environment variable to capture it:

```env
OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT=true
```

This is the .NET equivalent of the Python sample's `ENABLE_SENSITIVE_DATA`. It is read by `OpenTelemetryAgent.EnableSensitiveData`.
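
As a rough illustration only (the exact `OpenTelemetryAgent` constructor and property shapes are an assumption here and may differ between package versions), the hosting layer does something along these lines when the variable is set:

```csharp
// Illustrative sketch, not the actual hosting source: wrap the agent and opt in
// to sensitive content when the standard OTel GenAI env var is "true".
bool captureContent = string.Equals(
    Environment.GetEnvironmentVariable("OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT"),
    "true", StringComparison.OrdinalIgnoreCase);

AIAgent instrumented = new OpenTelemetryAgent(agent)
{
    EnableSensitiveData = captureContent,
};
```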

### Where the telemetry goes

Foundry injects `APPLICATIONINSIGHTS_CONNECTION_STRING` when the agent runs in the hosted environment, so traces, metrics and logs flow to Application Insights with no code change. To send telemetry from a local run, set the connection string yourself in `.env`.
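
For example, uncomment the corresponding line in `.env` (the `.env.example` in this folder ships it commented out):

```env
# Only needed for local runs; Foundry injects this when the agent is hosted.
APPLICATIONINSIGHTS_CONNECTION_STRING=<your-app-insights-connection-string>
```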

## Prerequisites

- [.NET 10 SDK](https://dotnet.microsoft.com/download/dotnet/10.0)
- An Azure AI Foundry project with a deployed model (e.g., `gpt-4o`)
- Azure CLI logged in (`az login`)

## Configuration

```bash
cp .env.example .env
```

Edit `.env` and set your Azure AI Foundry project endpoint:

```env
AZURE_AI_PROJECT_ENDPOINT=https://<your-account>.services.ai.azure.com/api/projects/<your-project>
ASPNETCORE_URLS=http://+:8088
ASPNETCORE_ENVIRONMENT=Development
AZURE_AI_MODEL_DEPLOYMENT_NAME=gpt-4o
OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT=true
```

> **Note:** `.env` is gitignored. The `.env.example` template is checked in as a reference.

## Running directly (contributors)

```bash
cd dotnet/samples/04-hosting/FoundryHostedAgents/responses/Hosted-Observability
AGENT_NAME=hosted-observability dotnet run
```

The agent starts on `http://localhost:8088`.

### Test it

```bash
azd ai agent invoke --local "What is the current weather where I am?"
```

Or with curl:

```bash
curl -X POST http://localhost:8088/responses \
-H "Content-Type: application/json" \
-d '{"input": "What is the current weather where I am?", "model": "hosted-observability"}'
```

## Expected span tree

A single request produces approximately the following spans:

| Span | Source |
|------|--------|
| `invoke_agent` | Outer span emitted by the Azure AI AgentServer hosting SDK |
| `agent_invoke <name>` | Emitted by `OpenTelemetryAgent` for each agent invocation |
| `chat <model>` | Emitted by the underlying `IChatClient` for each model call |
| `execute_tool <tool>` | Emitted for each invocation of `GetCurrentLocation` / `GetWeather` |

See the [OpenTelemetry GenAI semantic conventions](https://opentelemetry.io/docs/specs/semconv/gen-ai/) for the attributes captured on each span.
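
As an illustrative, non-exhaustive example, the `chat <model>` span typically carries attributes such as the following (names follow the GenAI semantic conventions; the exact set depends on the SDK version and the capture setting above):

```text
gen_ai.operation.name: chat
gen_ai.request.model: gpt-4o
gen_ai.usage.input_tokens: <count>
gen_ai.usage.output_tokens: <count>
gen_ai.tool.name: GetWeather   # present on execute_tool spans
```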

## Running with Docker

This project uses `ProjectReference` to the local Agent Framework source, so use `Dockerfile.contributor` with a pre-published output:

```bash
dotnet publish -c Debug -f net10.0 -r linux-musl-x64 --self-contained false -o out
docker build -f Dockerfile.contributor -t hosted-observability .

export AZURE_BEARER_TOKEN=$(az account get-access-token --resource https://ai.azure.com --query accessToken -o tsv)
docker run --rm -p 8088:8088 \
-e AGENT_NAME=hosted-observability \
-e AZURE_BEARER_TOKEN=$AZURE_BEARER_TOKEN \
--env-file .env \
hosted-observability
```

## Deploying to Foundry and viewing traces

Once deployed, telemetry flows to the Application Insights instance attached to your Foundry project. In the Foundry UI, the **Traces** tab next to **Playground** lists conversations and lets you drill into the span tree for any request.

## NuGet package users

If consuming the Agent Framework as a NuGet package, use the standard `Dockerfile` instead of `Dockerfile.contributor`. See the commented section in `HostedObservability.csproj` for the `PackageReference` alternative.
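
For reference, the package-based alternative in the project file looks like this (versions as committed in the sample; adjust to the release you are targeting):

```xml
<ItemGroup>
  <PackageReference Include="Microsoft.Agents.AI.Foundry" Version="1.0.0" />
  <PackageReference Include="Microsoft.Agents.AI.Foundry.Hosting" Version="1.0.0" />
</ItemGroup>
```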
@@ -0,0 +1,34 @@
# yaml-language-server: $schema=https://raw.githubusercontent.com/microsoft/AgentSchema/refs/heads/main/schemas/v1.0/AgentManifest.yaml
name: hosted-observability
displayName: "Hosted Observability Agent"

description: >
A hosted Agent Framework agent that demonstrates how the Foundry hosting
pipeline emits OpenTelemetry traces, metrics and logs to Application Insights
with no extra wiring required.

metadata:
tags:
- AI Agent Hosting
- Azure AI AgentServer
- Responses Protocol
- Observability
- OpenTelemetry
- Agent Framework

template:
name: hosted-observability
kind: hosted
protocols:
- protocol: responses
version: 1.0.0
resources:
cpu: "0.25"
memory: 0.5Gi
environment_variables:
# Capture prompt / completion / tool argument content on GenAI spans.
- name: OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT
value: "true"
parameters:
properties: []
resources: []
@@ -0,0 +1,14 @@
# yaml-language-server: $schema=https://raw.githubusercontent.com/microsoft/AgentSchema/refs/heads/main/schemas/v1.0/ContainerAgent.yaml
kind: hosted
name: hosted-observability
protocols:
- protocol: responses
version: 1.0.0
resources:
cpu: "0.25"
memory: 0.5Gi
environment_variables:
# Capture prompt / completion / tool argument content on GenAI spans.
# See https://opentelemetry.io/docs/specs/semconv/gen-ai/ for the standard env var.
- name: OTEL_INSTRUMENTATION_GENAI_CAPTURE_MESSAGE_CONTENT
value: "true"