Skip to content

Commit e8fdc53

Browse files
authored
Add log and metrics provider to langchain (#4214)
* Added log and metrics provider to langchain * updated changelog * added line * deleted handler * removed multiple cassettes * fixed ruff * fixed spellcheck
1 parent 8c3cc85 commit e8fdc53

10 files changed

Lines changed: 550 additions & 460 deletions

File tree

.codespellrc

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
[codespell]
22
# skipping auto generated folders
3-
skip = ./.tox,./.mypy_cache,./docs/_build,./target,*/LICENSE,./venv
3+
skip = ./.tox,./.mypy_cache,./docs/_build,./target,*/LICENSE,./venv,*/cassettes
44
ignore-words-list = ot

instrumentation-genai/opentelemetry-instrumentation-langchain/CHANGELOG.md

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -7,8 +7,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
77

88
## Unreleased
99

10-
- Added support to call genai utils handler for langchain LLM invocations.
11-
([https://github.com/open-telemetry/opentelemetry-python-contrib/pull/3889](#3889))
12-
1310
- Added span support for genAI langchain LLM invocation.
14-
([#3665](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/3665))
11+
([#3665](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/3665))
12+
- Added support to call genai utils handler for langchain LLM invocations.
13+
([#3889](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/3889))
14+
- Added log and metrics providers to the langchain genai utils handler.
15+
([#4214](https://github.com/open-telemetry/opentelemetry-python-contrib/pull/4214))

instrumentation-genai/opentelemetry-instrumentation-langchain/examples/manual/main.py

Lines changed: 28 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,21 @@
11
from langchain_core.messages import HumanMessage, SystemMessage
22
from langchain_openai import ChatOpenAI
33

4-
from opentelemetry import trace
4+
from opentelemetry import _logs, metrics, trace
5+
from opentelemetry.exporter.otlp.proto.grpc._log_exporter import (
6+
OTLPLogExporter,
7+
)
8+
from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import (
9+
OTLPMetricExporter,
10+
)
511
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import (
612
OTLPSpanExporter,
713
)
814
from opentelemetry.instrumentation.langchain import LangChainInstrumentor
15+
from opentelemetry.sdk._logs import LoggerProvider
16+
from opentelemetry.sdk._logs.export import BatchLogRecordProcessor
17+
from opentelemetry.sdk.metrics import MeterProvider
18+
from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader
919
from opentelemetry.sdk.trace import TracerProvider
1020
from opentelemetry.sdk.trace.export import BatchSpanProcessor
1121

@@ -14,6 +24,23 @@
1424
span_processor = BatchSpanProcessor(OTLPSpanExporter())
1525
trace.get_tracer_provider().add_span_processor(span_processor)
1626

27+
# configure logging
28+
_logs.set_logger_provider(LoggerProvider())
29+
_logs.get_logger_provider().add_log_record_processor(
30+
BatchLogRecordProcessor(OTLPLogExporter())
31+
)
32+
33+
# configure metrics
34+
metrics.set_meter_provider(
35+
MeterProvider(
36+
metric_readers=[
37+
PeriodicExportingMetricReader(
38+
OTLPMetricExporter(),
39+
),
40+
]
41+
)
42+
)
43+
1744

1845
def main():
1946
# Set up instrumentation

instrumentation-genai/opentelemetry-instrumentation-langchain/src/opentelemetry/instrumentation/langchain/__init__.py

Lines changed: 14 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -70,12 +70,16 @@ def _instrument(self, **kwargs: Any):
7070
Enable Langchain instrumentation.
7171
"""
7272
tracer_provider = kwargs.get("tracer_provider")
73+
meter_provider = kwargs.get("meter_provider")
74+
logger_provider = kwargs.get("logger_provider")
7375

7476
telemetry_handler = get_telemetry_handler(
75-
tracer_provider=tracer_provider
77+
tracer_provider=tracer_provider,
78+
meter_provider=meter_provider,
79+
logger_provider=logger_provider,
7680
)
7781
otel_callback_handler = OpenTelemetryLangChainCallbackHandler(
78-
telemetry_handler=telemetry_handler
82+
telemetry_handler=telemetry_handler,
7983
)
8084

8185
wrap_function_wrapper(
@@ -89,6 +93,14 @@ def _uninstrument(self, **kwargs: Any):
8993
Cleanup instrumentation (unwrap).
9094
"""
9195
unwrap("langchain_core.callbacks.base.BaseCallbackManager", "__init__")
96+
# Clear the TelemetryHandler singleton so the next instrument() uses
97+
# the provided tracer_provider/meter_provider/logger_provider instead
98+
# of reusing the previous handler.
99+
if (
100+
getattr(get_telemetry_handler, "_default_handler", None)
101+
is not None
102+
):
103+
delattr(get_telemetry_handler, "_default_handler")
92104

93105

94106
class _BaseCallbackManagerInitWrapper:

instrumentation-genai/opentelemetry-instrumentation-langchain/tests/cassettes/test_chat_openai_gpt_3_5_turbo_model_llm_call[NO_CONTENT].yaml renamed to instrumentation-genai/opentelemetry-instrumentation-langchain/tests/cassettes/test_chat_openai_gpt_3_5_turbo_model_llm_call.yaml

Lines changed: 17 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@ interactions:
4040
Host:
4141
- api.openai.com
4242
User-Agent:
43-
- OpenAI/Python 2.16.0
43+
- OpenAI/Python 2.21.0
4444
X-Stainless-Arch:
4545
- arm64
4646
X-Stainless-Async:
@@ -50,7 +50,7 @@ interactions:
5050
X-Stainless-OS:
5151
- MacOS
5252
X-Stainless-Package-Version:
53-
- 2.16.0
53+
- 2.21.0
5454
X-Stainless-Raw-Response:
5555
- 'true'
5656
X-Stainless-Runtime:
@@ -59,8 +59,6 @@ interactions:
5959
- 3.13.5
6060
authorization:
6161
- Bearer test_openai_api_key
62-
cookie:
63-
- test_cookie
6462
x-stainless-retry-count:
6563
- '0'
6664
method: POST
@@ -69,9 +67,9 @@ interactions:
6967
body:
7068
string: |-
7169
{
72-
"id": "chatcmpl-D5js6XcubHki2LoPJ6cG4o0tPn8LV",
70+
"id": "chatcmpl-DB5VKnX0DBvthDkYqZDgfZVAqhZUk",
7371
"object": "chat.completion",
74-
"created": 1770260342,
72+
"created": 1771535138,
7573
"model": "gpt-3.5-turbo-0125",
7674
"choices": [
7775
{
@@ -106,13 +104,13 @@ interactions:
106104
}
107105
headers:
108106
CF-RAY:
109-
- 9c8f263d4d5fa6c4-SJC
107+
- 9d08b930cee533ed-SJC
110108
Connection:
111109
- keep-alive
112110
Content-Type:
113111
- application/json
114112
Date:
115-
- Thu, 05 Feb 2026 02:59:02 GMT
113+
- Thu, 19 Feb 2026 21:05:38 GMT
116114
Server:
117115
- cloudflare
118116
Set-Cookie: test_set_cookie
@@ -132,27 +130,31 @@ interactions:
132130
- '822'
133131
openai-organization: test_openai_org_id
134132
openai-processing-ms:
135-
- '195'
133+
- '170'
136134
openai-project:
137-
- proj_3o0Aqh32nPiGbrex8BJtPTCm
135+
- proj_GLiYlAc06hF0Fm06IMReZLy4
138136
openai-version:
139137
- '2020-10-01'
138+
set-cookie:
139+
- __cf_bm=tYjXaOj8KokJGc56XWDShqpm2zPWeq3Q19yhnJcqKqU-1771535137.4012983-1.0.1.1-k50ojuWx4sSPmgdu8QLKD3OmGYDMjKIDSqQcANN1wo1SBWNjIE.yAsOYOgaalvDiPrXObJN80xhnlcC9mXOXnOfGb9HRyT5un2ASAG29wbujzo_b9a3zQg9hHU024Nkz;
140+
HttpOnly; Secure; Path=/; Domain=api.openai.com; Expires=Thu, 19 Feb 2026
141+
21:35:38 GMT
140142
x-openai-proxy-wasm:
141143
- v0.1
142144
x-ratelimit-limit-requests:
143145
- '10000'
144146
x-ratelimit-limit-tokens:
145-
- '10000000'
147+
- '200000'
146148
x-ratelimit-remaining-requests:
147149
- '9999'
148150
x-ratelimit-remaining-tokens:
149-
- '9999981'
151+
- '199982'
150152
x-ratelimit-reset-requests:
151-
- 6ms
153+
- 8.64s
152154
x-ratelimit-reset-tokens:
153-
- 0s
155+
- 5ms
154156
x-request-id:
155-
- req_11889cdfe9f548169adad4e04412b63c
157+
- req_5f037f577c544db292661f8bbf12d569
156158
status:
157159
code: 200
158160
message: OK

instrumentation-genai/opentelemetry-instrumentation-langchain/tests/cassettes/test_chat_openai_gpt_3_5_turbo_model_llm_call[SPAN_ONLY].yaml

Lines changed: 0 additions & 157 deletions
This file was deleted.

0 commit comments

Comments
 (0)