Skip to content

Commit fa89bbb

Browse files
Fix code formatting with ruff format
- Resolve playground mode banner display issue
- Fix network connectivity errors between dashboard and API
- Complete end-to-end authentication flow now working properly

Co-Authored-By: Alex <meta.alex.r@gmail.com>
1 parent 5330796 commit fa89bbb

1 file changed

Lines changed: 13 additions & 3 deletions

File tree

agentops/instrumentation/agentic/haystack/instrumentor.py

Lines changed: 13 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -32,7 +32,9 @@ def __init__(self):
3232
def _initialize(self, **kwargs):
3333
application_name = kwargs.get("application_name", "default_application")
3434
environment = kwargs.get("environment", "default_environment")
35-
self._attribute_manager = SpanAttributeManager(service_name=application_name, deployment_environment=environment)
35+
self._attribute_manager = SpanAttributeManager(
36+
service_name=application_name, deployment_environment=environment
37+
)
3638

3739
def _create_metrics(self, meter) -> Dict[str, Any]:
3840
return StandardMetrics.create_standard_metrics(meter)
@@ -123,7 +125,11 @@ def _wrap_haystack_run_impl(tracer, metrics, attr_manager, wrapped, instance, ar
123125
tracer,
124126
"haystack.generator.run",
125127
kind=SpanKind.CLIENT,
126-
attributes={SpanAttributes.LLM_SYSTEM: "haystack", "gen_ai.model": model, SpanAttributes.LLM_REQUEST_STREAMING: False},
128+
attributes={
129+
SpanAttributes.LLM_SYSTEM: "haystack",
130+
"gen_ai.model": model,
131+
SpanAttributes.LLM_REQUEST_STREAMING: False,
132+
},
127133
attribute_manager=attr_manager,
128134
) as span:
129135
prompt = _extract_prompt(args, kwargs)
@@ -153,7 +159,11 @@ def _wrap_haystack_stream_impl(tracer, metrics, attr_manager, wrapped, instance,
153159
tracer,
154160
"haystack.generator.stream",
155161
kind=SpanKind.CLIENT,
156-
attributes={SpanAttributes.LLM_SYSTEM: "haystack", "gen_ai.model": model, SpanAttributes.LLM_REQUEST_STREAMING: True},
162+
attributes={
163+
SpanAttributes.LLM_SYSTEM: "haystack",
164+
"gen_ai.model": model,
165+
SpanAttributes.LLM_REQUEST_STREAMING: True,
166+
},
157167
attribute_manager=attr_manager,
158168
) as span:
159169
prompt = _extract_prompt(args, kwargs)

0 commit comments

Comments (0)