Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
42 changes: 20 additions & 22 deletions nodejs/langchain/sample-agent/.env.example
Original file line number Diff line number Diff line change
Expand Up @@ -17,32 +17,20 @@ BEARER_TOKEN=
MCP_PLATFORM_ENDPOINT=
MCP_PLATFORM_AUTHENTICATION_SCOPE=

# Environment Settings (set NODE_ENV=production to enable JWT validation against Entra)
NODE_ENV=development
PORT=3978

# Enable to use the observability exporter; defaults to false, which means the console exporter is used
ENABLE_A365_OBSERVABILITY_EXPORTER=false
# Used by the sample to demonstrate a custom token resolver and token cache when true; otherwise the built-in AgenticTokenCache is used
Use_Custom_Resolver=true
# optional - set to enable observability logs; value can be 'info', 'warn', or 'error'; defaults to 'none' if not set
A365_OBSERVABILITY_LOG_LEVEL=
# Sample Observability Options
# When true, the sample uses a custom token resolver + local cache;
# otherwise it relies on the built-in AgenticTokenCacheInstance.
Use_Custom_Resolver=false

# Environment Settings
NODE_ENV=development # Retrieve mcp servers from ToolingManifest

# Telemetry and Tracing Configuration
DEBUG=agents:*
AZURE_EXPERIMENTAL_ENABLE_ACTIVITY_SOURCE=true
AZURE_TRACING_GEN_AI_CONTENT_RECORDING_ENABLED=true
OPENAI_AGENTS_DISABLE_TRACING=false
OTEL_SDK_DISABLED=false
CONNECTION_STRING=

# Use Agentic Authentication rather than OBO
USE_AGENTIC_AUTH=false

# Service Connection Settings
# Service Connection Settings (stamped by `a365 setup all` against your Entra app)
connections__service_connection__settings__clientId=
connections__service_connection__settings__clientSecret=
connections__service_connection__settings__tenantId=
connections__service_connection__settings__scopes=

# Set service connection as default
connectionsMap__0__serviceUrl=*
Expand All @@ -51,4 +39,14 @@ connectionsMap__0__connection=service_connection
# AgenticAuthentication Options
agentic_type=agentic
agentic_altBlueprintConnectionName=service_connection
agentic_scopes=ea9ffc3e-8a23-4a7d-836d-234d7c7565c1/.default # Prod Agentic scope
agentic_scopes=https://graph.microsoft.com/.default
agentic_connectionName=AgenticAuthConnection

# Agent 365 Observability (stamped by `a365 setup all` for telemetry export)
agent365Observability__agentId=
agent365Observability__agentName=
agent365Observability__agentDescription=
agent365Observability__tenantId=
agent365Observability__agentBlueprintId=
agent365Observability__clientId=
agent365Observability__clientSecret=
3 changes: 2 additions & 1 deletion nodejs/langchain/sample-agent/env/.env.playground.user
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,10 @@
SECRET_AZURE_OPENAI_API_KEY=
AZURE_OPENAI_ENDPOINT=
AZURE_OPENAI_DEPLOYMENT_NAME=
AZURE_OPENAI_API_VERSION=

# Option 2: OpenAI (if Azure OpenAI not configured)
# SECRET_OPENAI_API_KEY=
# OPENAI_MODEL=gpt-4o

SECRET_BEARER_TOKEN=
SECRET_BEARER_TOKEN=
1 change: 1 addition & 0 deletions nodejs/langchain/sample-agent/m365agents.playground.yml
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ deploy:
AZURE_OPENAI_API_KEY: ${{SECRET_AZURE_OPENAI_API_KEY}}
AZURE_OPENAI_ENDPOINT: ${{AZURE_OPENAI_ENDPOINT}}
AZURE_OPENAI_DEPLOYMENT: ${{AZURE_OPENAI_DEPLOYMENT_NAME}}
AZURE_OPENAI_API_VERSION: ${{AZURE_OPENAI_API_VERSION}}
# OPENAI_API_KEY: ${{SECRET_OPENAI_API_KEY}}
BEARER_TOKEN: ${{SECRET_BEARER_TOKEN}}
USE_AGENTIC_AUTH: ${{USE_AGENTIC_AUTH}}
Expand Down
10 changes: 4 additions & 6 deletions nodejs/langchain/sample-agent/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -25,12 +25,10 @@
"@langchain/langgraph": "^1.0.2",
"@langchain/mcp-adapters": "^1.0.0",
"@langchain/openai": "^1.0.2",
"@microsoft/agents-a365-notifications": "^0.2.0-preview.1",
"@microsoft/agents-a365-observability": "^0.2.0-preview.1",
"@microsoft/agents-a365-observability-hosting": "^0.2.0-preview.1",
"@microsoft/agents-a365-runtime": "^0.2.0-preview.1",
"@microsoft/agents-a365-tooling": "^0.2.0-preview.1",
"@microsoft/agents-a365-tooling-extensions-langchain": "^0.2.0-preview.1",
"@microsoft/agents-a365-notifications": "^1.0.0",
"@microsoft/agents-a365-runtime": "^1.0.0",
"@microsoft/agents-a365-tooling": "^1.0.0",
"@microsoft/agents-a365-tooling-extensions-langchain": "^1.0.0",
"@microsoft/agents-activity": "^1.2.2",
"@microsoft/agents-hosting": "^1.2.2",
"@microsoft/opentelemetry": "^1.0.0",
Expand Down
12 changes: 5 additions & 7 deletions nodejs/langchain/sample-agent/src/agent.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,7 @@ import { Activity, ActivityTypes } from '@microsoft/agents-activity';
import '@microsoft/agents-a365-notifications';
import { AgentNotificationActivity, NotificationType, createEmailResponseActivity } from '@microsoft/agents-a365-notifications';
// Observability Imports
import { BaggageBuilder } from '@microsoft/agents-a365-observability';
import { AgenticTokenCacheInstance, BaggageBuilderUtils } from '@microsoft/agents-a365-observability-hosting';
import { BaggageBuilder, AgenticTokenCacheInstance, BaggageBuilderUtils } from '@microsoft/opentelemetry';
import { getObservabilityAuthenticationScope } from '@microsoft/agents-a365-runtime';
import tokenCache, { createAgenticTokenCacheKey } from './token-cache';
import { Client, getClient } from './client';
Expand Down Expand Up @@ -78,7 +77,7 @@ export class A365Agent extends AgentApplication<TurnState> {

const baggageScope = BaggageBuilderUtils.fromTurnContext(
new BaggageBuilder(),
turnContext
turnContext as any
).sessionDescription('Initial onboarding session')
.build();

Expand Down Expand Up @@ -118,12 +117,11 @@ export class A365Agent extends AgentApplication<TurnState> {
const cacheKey = createAgenticTokenCacheKey(agentId, tenantId);
tokenCache.set(cacheKey, aauToken?.token || '');
} else {
await AgenticTokenCacheInstance.RefreshObservabilityToken(
await AgenticTokenCacheInstance.refreshObservabilityToken(
agentId,
tenantId,
turnContext,
this.authorization,
getObservabilityAuthenticationScope()
turnContext as any,
this.authorization as any
);
}
}
Expand Down
61 changes: 38 additions & 23 deletions nodejs/langchain/sample-agent/src/client.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,8 @@ import {
InferenceOperationType,
AgentDetails,
InferenceDetails,
Request,
} from '@microsoft/agents-a365-observability';
A365Request,
} from '@microsoft/opentelemetry';

export interface Client {
invokeInferenceScope(prompt: string): Promise<string>;
Expand All @@ -37,11 +37,14 @@ function createChatModel(): BaseChatModel {
// Check for Azure OpenAI configuration first
if (process.env.AZURE_OPENAI_API_KEY && process.env.AZURE_OPENAI_ENDPOINT && process.env.AZURE_OPENAI_DEPLOYMENT) {
console.log('Using Azure OpenAI');
const endpoint = process.env.AZURE_OPENAI_ENDPOINT.replace(/\/$/, '');
const deployment = process.env.AZURE_OPENAI_DEPLOYMENT;
const apiVersion = process.env.AZURE_OPENAI_API_VERSION || "2025-03-01-preview";
return new AzureChatOpenAI({
azureOpenAIApiKey: process.env.AZURE_OPENAI_API_KEY,
azureOpenAIApiInstanceName: process.env.AZURE_OPENAI_ENDPOINT?.replace('https://', '').replace('.openai.azure.com/', '').replace('.openai.azure.com', ''),
azureOpenAIApiDeploymentName: process.env.AZURE_OPENAI_DEPLOYMENT,
azureOpenAIApiVersion: process.env.AZURE_OPENAI_API_VERSION || "2025-03-01-preview",
azureOpenAIBasePath: `${endpoint}/openai/deployments`,
azureOpenAIApiDeploymentName: deployment,
azureOpenAIApiVersion: apiVersion,
temperature: 0,
});
}
Expand Down Expand Up @@ -152,7 +155,7 @@ class LangChainClient implements Client {
* @param {string} userMessage - The message or prompt to send to the agent.
* @returns {Promise<string>} The response from the agent, or an error message if the query fails.
*/
async invokeAgent(userMessage: string): Promise<string> {
async invokeAgent(userMessage: string): Promise<{ content: string; inputTokens: number; outputTokens: number; finishReason: string }> {
const result = await this.agent.invoke({
messages: [
{
Expand All @@ -162,33 +165,45 @@ class LangChainClient implements Client {
],
});

let agentMessage: any = '';
let content = '';
let inputTokens = 0;
let outputTokens = 0;
let finishReason = 'stop';

// Extract the content from the LangChain response
if (result.messages && result.messages.length > 0) {
// Sum usage_metadata across every AI message in the React loop so the
// manual InferenceScope reflects total work, matching the auto-instrumented invoke_agent span.
for (const msg of result.messages) {
const usage = (msg as any).usage_metadata;
if (usage) {
inputTokens += usage.input_tokens ?? 0;
outputTokens += usage.output_tokens ?? 0;
}
}
const lastMessage = result.messages[result.messages.length - 1];
agentMessage = lastMessage.content || "No content in response";
content = (lastMessage.content as string) || "No content in response";
finishReason = (lastMessage as any).response_metadata?.finish_reason ?? 'stop';
}

// Fallback if result is already a string
if (typeof result === 'string') {
agentMessage = result;
content = result;
}

if (!agentMessage) {
return "Sorry, I couldn't get a response from the agent :(";
if (!content) {
content = "Sorry, I couldn't get a response from the agent :(";
}

return agentMessage;
return { content, inputTokens, outputTokens, finishReason };
}

async invokeInferenceScope(prompt: string) {
const model = process.env.AZURE_OPENAI_DEPLOYMENT || process.env.OPENAI_MODEL || 'unknown';
const inferenceDetails: InferenceDetails = {
operationName: InferenceOperationType.CHAT,
model: "gpt-4o-mini",
model,
};
Comment on lines 199 to 204

const request: Request = {
const request: A365Request = {
conversationId: this.turnContext?.activity?.conversation?.id || `conv-${Date.now()}`,
};

Expand All @@ -202,13 +217,13 @@ class LangChainClient implements Client {
const scope = InferenceScope.start(request, inferenceDetails, agentDetails);
try {
await scope.withActiveSpanAsync(async () => {
response = await this.invokeAgent(prompt);
// Record the inference response with token usage
scope.recordOutputMessages([response]);
scope.recordInputMessages([prompt]);
scope.recordInputTokens(45);
scope.recordOutputTokens(78);
scope.recordFinishReasons(['stop']);
const result = await this.invokeAgent(prompt);
response = result.content;
scope.recordInputMessages([prompt]);
scope.recordOutputMessages([response]);
scope.recordInputTokens(result.inputTokens);
scope.recordOutputTokens(result.outputTokens);
scope.recordFinishReasons([result.finishReason]);
});
} catch (error) {
scope.recordError(error as Error);
Expand Down
4 changes: 2 additions & 2 deletions nodejs/langchain/sample-agent/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,11 +9,11 @@ configDotenv();
// Initialize Microsoft OpenTelemetry distro for observability.
// Must be called before importing other modules so instrumentations can patch libraries.
// See: https://github.com/microsoft/opentelemetry-distro-javascript
import { useMicrosoftOpenTelemetry } from '@microsoft/opentelemetry';
import { useMicrosoftOpenTelemetry, AgenticTokenCacheInstance } from '@microsoft/opentelemetry';
import { tokenResolver } from './token-cache';
import { AgenticTokenCacheInstance } from '@microsoft/agents-a365-observability-hosting';

useMicrosoftOpenTelemetry({
enableConsoleExporters: true,
a365: {
Comment on lines 15 to 17
enabled: true,
// When Use_Custom_Resolver is true the sample populates a local token cache;
Expand Down
Loading