OpenTelemetry for LangChain
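This notebook shows how to instrument LangChain with OpenTelemetry using OpenInference's LangChainInstrumentor, exporting the resulting traces both to Azure Monitor (Application Insights) and to a locally running Arize Phoenix instance over OTLP. The raw notebook follows.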
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"# import dependencies\n",
"from opentelemetry import trace as trace_api\n",
"from opentelemetry.sdk import trace as trace_sdk\n",
"from opentelemetry.sdk.trace.export import BatchSpanProcessor\n",
"from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter\n",
"\n",
"from azure.monitor.opentelemetry.exporter import AzureMonitorTraceExporter\n",
"from openinference.instrumentation.langchain import LangChainInstrumentor"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"APPLICATIONINSIGHTS_CONNECTION_STRING = os.getenv('APPLICATIONINSIGHTS_CONNECTION_STRING')\n",
"AOAI_API_KEY = os.getenv('AOAI_API_KEY')\n",
"AOAI_DEPLOYMENT_GPT3 = os.getenv('AOAI_DEPLOYMENT_GPT3')\n",
"AOAI_API_VERSION = os.getenv('AOAI_API_VERSION')\n",
"AOAI_ENDPOINT = os.getenv('AOAI_ENDPOINT')\n",
"APPLICATIONINSIGHTS_ENABLED = True\n",
"ARIZEPHOENIX_ENABLED = True"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
"from langchain_openai import AzureChatOpenAI\n",
"llm = AzureChatOpenAI(api_key=AOAI_API_KEY, azure_deployment=AOAI_DEPLOYMENT_GPT3, \n",
" azure_endpoint=AOAI_ENDPOINT, api_version=AOAI_API_VERSION)"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"# open telemetry setup\n",
"tracer_provider = trace_sdk.TracerProvider()\n",
"trace_api.set_tracer_provider(tracer_provider=tracer_provider)\n",
"if APPLICATIONINSIGHTS_ENABLED:\n",
" # configure Azure Monitor exporter\n",
" azure_exporter = AzureMonitorTraceExporter(connection_string=APPLICATIONINSIGHTS_CONNECTION_STRING)\n",
" tracer_provider.add_span_processor(BatchSpanProcessor(span_exporter=azure_exporter))\n",
"if ARIZEPHOENIX_ENABLED:\n",
" # configure Arize Phoenix exporter (launch phoenix from code/cli/docker)\n",
" # docker run -p 6006:6006 arizephoenix/phoenix\n",
" span_otlp_exporter = OTLPSpanExporter(endpoint=\"http://127.0.0.1:6006/v1/traces\")\n",
" tracer_provider.add_span_processor(BatchSpanProcessor(span_exporter=span_otlp_exporter))"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"# instruments langchain\n",
"LangChainInstrumentor().instrument()"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Why did the bicycle fall over? It was two-tired!"
]
}
],
"source": [
"# operate with langchain as usual\n",
"for chunk in llm.stream(\"Tell me a joke about bikes.\"):\n",
" print(chunk.content, end=\"\", flush=True)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "snippets",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.13"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
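The notebook depends on the packages below, listed alongside the gist (presumably as a requirements file) and installable with pip before running the cells: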
langchain_openai
opentelemetry-sdk
opentelemetry-exporter-otlp
openinference-instrumentation-langchain
azure-monitor-opentelemetry
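One caveat worth noting: both exporters are registered behind a BatchSpanProcessor, which exports spans asynchronously, so spans still buffered at the end of a short-lived session may never be sent. A minimal sketch, assuming the tracer_provider created in the setup cell, that flushes pending spans explicitly using the standard opentelemetry-sdk calls:

# flush spans still buffered by the BatchSpanProcessor instances;
# force_flush() blocks until export finishes or the timeout elapses
tracer_provider.force_flush(timeout_millis=10_000)

# optionally shut the provider down when finished; this also flushes
# and then stops the registered span processors
tracer_provider.shutdown()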