Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions content-gen/infra/main.bicep
Original file line number Diff line number Diff line change
Expand Up @@ -869,6 +869,8 @@ module containerInstance 'modules/container-instance.bicep' = {
{ name: 'AZURE_AI_PROJECT_ENDPOINT', value: aiFoundryAiProjectEndpoint }
{ name: 'AZURE_AI_MODEL_DEPLOYMENT_NAME', value: gptModelName }
{ name: 'AZURE_AI_IMAGE_MODEL_DEPLOYMENT', value: imageModelConfig[imageModelChoice].name }
// Application Insights
{ name: 'APPLICATIONINSIGHTS_CONNECTION_STRING', value: enableMonitoring ? applicationInsights!.outputs.connectionString : '' }
]
}
}
Expand Down
64 changes: 63 additions & 1 deletion content-gen/src/backend/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@

from quart import Quart, request, jsonify, Response
from quart_cors import cors
from opentelemetry import trace

from settings import app_settings
from models import CreativeBrief, Product
Expand All @@ -24,6 +25,9 @@
from services.blob_service import get_blob_service
from services.title_service import get_title_service
from api.admin import admin_bp
from azure.core.settings import settings as azure_settings
from azure.monitor.opentelemetry import configure_azure_monitor
from opentelemetry.instrumentation.asgi import OpenTelemetryMiddleware

# In-memory task storage for generation tasks
# In production, this should be replaced with Redis or similar
Expand All @@ -34,17 +38,75 @@
level=logging.INFO,
format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
# Quiet the Azure SDK's per-request HTTP logging (INFO-level) globally.
# NOTE(review): this same setLevel call is repeated inside the App Insights
# branch below — one of the two is redundant; confirm which should stay.
logging.getLogger("azure.core.pipeline.policies.http_logging_policy").setLevel(logging.WARNING)
logger = logging.getLogger(__name__)

# Create Quart app and allow cross-origin requests from any origin
# (frontend is served from a different host during development).
app = Quart(__name__)
app = cors(app, allow_origin="*")

# Check if the Application Insights connection string is set in the environment variables.
# The connection string is injected by the Bicep deployment when monitoring is enabled;
# when absent (local dev, monitoring disabled) telemetry setup is skipped entirely.
appinsights_connection_string = os.getenv("APPLICATIONINSIGHTS_CONNECTION_STRING")
if appinsights_connection_string:
    # Configure Application Insights if the connection string is found.
    # Live metrics and performance counters are disabled to reduce
    # telemetry volume/cost; traces, logs, and request spans still flow.
    configure_azure_monitor(
        connection_string=appinsights_connection_string,
        enable_live_metrics=False,
        enable_performance_counters=False,
    )
    # Suppress verbose Azure SDK INFO logs from App Insights
    # WARNING/ERROR/CRITICAL from these loggers still come through
    logging.getLogger("azure.core.pipeline.policies.http_logging_policy").setLevel(logging.WARNING)
    logging.getLogger("azure.monitor.opentelemetry.exporter").setLevel(logging.WARNING)
    logging.getLogger("azure.identity").setLevel(logging.WARNING)
    logging.getLogger("azure.cosmos").setLevel(logging.WARNING)
    # Disable Azure SDK native span creation (ContainerProxy.*, BlobClient.* InProc spans)
    azure_settings.tracing_implementation = None
    # Apply ASGI middleware for request tracing (Quart is not auto-instrumented by configure_azure_monitor)
    # Exclude health probes, post-deploy admin calls, and polling endpoints from telemetry.
    # NOTE: ordering matters — configure_azure_monitor() must run first so the
    # middleware picks up the configured tracer provider.
    app.asgi_app = OpenTelemetryMiddleware(
        app.asgi_app,
        exclude_spans=["receive", "send"],
        excluded_urls="health,api/generate/status",
    )
    logger.info("Application Insights configured with the provided connection string")
else:
    # Log a warning if the connection string is not found
    logger.warning("No Application Insights connection string found. Skipping configuration")

# Register blueprints (admin endpoints live in api/admin.py)
app.register_blueprint(admin_bp)


@app.before_request
async def set_conversation_context():
    """Attach conversation_id and user_id to the current OTel span for App Insights.

    Looks for the identifiers in, by priority:
      1. the JSON request body (POST requests),
      2. URL path parameters (e.g. /api/conversations/<conversation_id>),
      3. query-string parameters (?conversation_id=...),
    and for user_id additionally the ``X-Ms-Client-Principal-Id`` header
    (set by Azure Easy Auth), falling back to ``"anonymous"``.

    Values are coerced to ``str`` before being set: OpenTelemetry attribute
    values must be str/bool/int/float, so a JSON ``null`` (-> None) or an
    int path parameter would otherwise be rejected/dropped by the SDK.
    """
    conversation_id = ""
    user_id = ""

    # 1. Extract from JSON body (POST requests). silent=True returns None
    # instead of raising on a malformed/absent body.
    if request.content_type and "json" in request.content_type:
        data = await request.get_json(silent=True)
        if isinstance(data, dict):
            # `or ""` guards against explicit JSON nulls ({"conversation_id": null}),
            # which .get(..., "") does NOT cover (the key exists, value is None).
            conversation_id = data.get("conversation_id") or ""
            user_id = data.get("user_id") or ""

    # 2. Extract from URL path parameters (e.g. /api/conversations/<conversation_id>)
    if not conversation_id and request.view_args:
        conversation_id = request.view_args.get("conversation_id") or ""

    # 3. Extract from query parameters (e.g. ?conversation_id=xxx)
    if not conversation_id:
        conversation_id = request.args.get("conversation_id", "")

    if not user_id:
        user_id = request.args.get("user_id", "") or request.headers.get("X-Ms-Client-Principal-Id", "anonymous")

    span = trace.get_current_span()
    if span.is_recording():
        # str() keeps the attribute a valid OTel type even when the source
        # value was a non-string (e.g. an int URL converter).
        span.set_attribute("conversation_id", str(conversation_id))
        span.set_attribute("user_id", str(user_id))


# ==================== Authentication Helper ====================

def get_authenticated_user():
Expand Down
4 changes: 4 additions & 0 deletions content-gen/src/backend/requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,10 @@ openai>=1.45.0
# HTTP Client (for Foundry direct API calls)
httpx>=0.27.0

# Monitoring / Telemetry
azure-monitor-opentelemetry>=1.6.0
opentelemetry-instrumentation-asgi>=0.48b0

# Data Validation
pydantic>=2.8.0
pydantic-settings>=2.4.0
Expand Down
Loading