Open
Description
I attempted to integrate Langfuse with CrewAI following the tutorial provided in the CrewAI documentation, but the tracing functionality is not working as expected.
Steps to Reproduce
- Go to Langfuse Cloud and create a new project.
- Generate and copy the API keys for the project.
- Use the following code snippet, replacing the placeholders with the API keys obtained in step 2.
- Run the code and observe the output.
Expected behavior
When I run the code, I expect to see "Hello, World!" HTML code printed in the terminal, along with the corresponding traces being sent to the Langfuse server. However, the tracing functionality does not seem to be working, and instead a lot of metadata is displayed in the terminal.
Screenshots/Code snippets
# Reproduction script: trace a minimal CrewAI run with Langfuse via the
# OpenTelemetry OTLP exporter.
import base64  # was missing in the original snippet -> NameError on b64encode
import os

from crewai import LLM, Agent, Task, Crew

# Langfuse project credentials (fill in before running).
LANGFUSE_SECRET_KEY = ''
LANGFUSE_PUBLIC_KEY = ''

# The Langfuse OTLP endpoint authenticates with HTTP Basic auth:
# base64("<public_key>:<secret_key>").
LANGFUSE_AUTH = base64.b64encode(
    f"{LANGFUSE_PUBLIC_KEY}:{LANGFUSE_SECRET_KEY}".encode()
).decode()

GROQ_API_KEY = ''

# OTLP exporter configuration. These environment variables must be set
# BEFORE any tracer provider is initialised, or the exporter will not
# pick them up ("Overriding of current TracerProvider is not allowed").
# os.environ["OTEL_EXPORTER_OTLP_ENDPOINT"] = "https://cloud.langfuse.com/api/public/otel"  # EU data region
os.environ["OTEL_EXPORTER_OTLP_ENDPOINT"] = "https://us.cloud.langfuse.com/api/public/otel"  # US data region
os.environ["OTEL_EXPORTER_OTLP_HEADERS"] = f"Authorization=Basic {LANGFUSE_AUTH}"

llm = LLM(
    model="groq/llama-3.3-70b-versatile",
    temperature=0.1,
    api_key=GROQ_API_KEY,
)

coder = Agent(
    role='Software developer',
    goal='Write clear, concise code on demand',
    backstory='An expert coder with a keen eye for software trends.',
    llm=llm,
)

# Create tasks for your agents
task1 = Task(
    description="Define the HTML for making a simple website with heading- Hello World! Langfuse monitors your CrewAI agent!",
    expected_output="A clear and concise HTML code",
    agent=coder,
)

# Instantiate your crew
crew = Crew(
    agents=[coder],
    tasks=[task1],
)

result = crew.kickoff()
print(result)
```
### Operating System
Windows 11
### Python Version
3.11
### crewAI Version
0.100.1
### crewAI Tools Version
0.0.1
### Virtual Environment
Venv
### Evidence
```>python .\test.py
Overriding of current TracerProvider is not allowed
{
"name": "Crew Created",
"context": {
"trace_id": "0xabd61cdf6a6a6b03aa8556a63800511d",
"span_id": "0xe57a278070fd0a5c",
"trace_state": "[]"
},
"kind": "SpanKind.INTERNAL",
"parent_id": null,
"start_time": "2025-04-11T05:10:21.706760Z",
"end_time": "2025-04-11T05:10:21.706760Z",
"status": {
"status_code": "OK"
},
"attributes": {
"crewai_version": "0.100.1",
"python_version": "3.11.8",
"crew_key": "4d5b21867b638a1846a38dc4af8e29a4",
"crew_id": "0f51b0a1-fd9a-4835-82ef-ad84a2b75d8f",
"crew_process": "sequential",
"crew_memory": false,
"crew_number_of_tasks": 1,
"crew_number_of_agents": 1,
"crew_agents": "[{\"key\": \"9ac73818a528f9f1a3f1067bc358e549\", \"id\": \"f04be4a8-3e49-420a-9e1f-af81c25777d8\", \"role\": \"Software developer\", \"verbose?\": false, \"max_iter\": 20, \"max_rpm\": null, \"function_calling_llm\": \"\", \"llm\": \"groq/llama-3.3-70b-versatile\", \"delegation_enabled?\": false, \"allow_code_execution?\": false, \"max_retry_limit\": 2, \"tools_names\": []}]",
"crew_tasks": "[{\"key\": \"b465f86cdaeb68be07239ef62e75944d\", \"id\": \"8c1a83b4-0952-4495-95b0-156ec333dc9b\", \"async_execution?\": false, \"human_input?\": false, \"agent_role\": \"Software developer\", \"agent_key\": \"9ac73818a528f9f1a3f1067bc358e549\", \"tools_names\": []}]"
},
"events": [],
"links": [],
"resource": {
"attributes": {
"telemetry.sdk.language": "python",
"telemetry.sdk.name": "openlit",
"telemetry.sdk.version": "1.31.1",
"service.name": "default",
"deployment.environment": "default"
},
"schema_url": ""
}
}
{
"name": "Task Created",
"context": {
"trace_id": "0x9fd0006c42c1b81dd47518b00045c948",
"span_id": "0x923210c34a83a4a6",
"trace_state": "[]"
},
"kind": "SpanKind.INTERNAL",
"parent_id": "0x16b8553adbfdc722",
"start_time": "2025-04-11T05:10:21.713758Z",
"end_time": "2025-04-11T05:10:21.713758Z",
"status": {
"status_code": "OK"
},
"attributes": {
"crew_key": "4d5b21867b638a1846a38dc4af8e29a4",
"crew_id": "0f51b0a1-fd9a-4835-82ef-ad84a2b75d8f",
"task_key": "b465f86cdaeb68be07239ef62e75944d",
"task_id": "8c1a83b4-0952-4495-95b0-156ec333dc9b"
},
"events": [],
"links": [],
"resource": {
"attributes": {
"telemetry.sdk.language": "python",
"telemetry.sdk.name": "openlit",
"telemetry.sdk.version": "1.31.1",
"service.name": "default",
"deployment.environment": "default"
},
"schema_url": ""
}
}
{
"name": "litellm.completion",
"context": {
"trace_id": "0x9fd0006c42c1b81dd47518b00045c948",
"span_id": "0x42871d6546b36486",
"trace_state": "[]"
},
"kind": "SpanKind.CLIENT",
"parent_id": "0x525fce774cfca1f1",
"start_time": "2025-04-11T05:10:21.714760Z",
"end_time": "2025-04-11T05:10:22.528648Z",
"status": {
"status_code": "OK"
},
"attributes": {
"telemetry.sdk.name": "openlit",
"gen_ai.system": "litellm",
"gen_ai.operation.name": "chat",
"gen_ai.endpoint": "litellm.completion",
"gen_ai.response.id": "chatcmpl-eadce91c-3a47-420c-a8dd-bba8792eb6dd",
"gen_ai.environment": "default",
"gen_ai.application_name": "default",
"gen_ai.request.model": "groq/llama-3.3-70b-versatile",
"gen_ai.request.top_p": 1.0,
"gen_ai.request.max_tokens": -1,
"gen_ai.request.user": "",
"gen_ai.request.temperature": 0.1,
"gen_ai.request.presence_penalty": 0.0,
"gen_ai.request.frequency_penalty": 0.0,
"gen_ai.request.seed": "",
"gen_ai.request.is_stream": false,
"gen_ai.usage.input_tokens": 212,
"gen_ai.usage.output_tokens": 97,
"gen_ai.usage.total_tokens": 309,
"gen_ai.response.finish_reasons": [
"stop"
],
"gen_ai.usage.cost": 0
},
"events": [
{
"name": "gen_ai.content.prompt",
"timestamp": "2025-04-11T05:10:22.527642Z",
"attributes": {
"gen_ai.prompt": "system: You are Software developer. An expert coder with a keen eye for software trends.\nYour personal goal is: Write clear, concise code on demand\nTo give my best complete final answer to the task respond using the exact following format:\n\nThought: I now can give a great answer\nFinal Answer: Your final answer must be the great and the most complete as possible, it must be outcome described.\n\nI MUST use these formats, my job depends on it!\nuser: \nCurrent Task: Define the HTML for making a simple website with heading- Hello World! Langfuse monitors your CrewAI agent!\n\nThis is the expect criteria for your final answer: A clear and concise HTML code\nyou MUST return the actual complete content as the final answer, not a summary.\n\nBegin! This is VERY important to you, use the tools available and give your best Final Answer, your job depends on it!\n\nThought:"
}
},
{
"name": "gen_ai.content.completion",
"timestamp": "2025-04-11T05:10:22.527642Z",
"attributes": {
"gen_ai.completion": "I now can give a great answer\n\nFinal Answer: \n```\n<!DOCTYPE html>\n<html lang=\"en\">\n<head>\n <meta charset=\"UTF-8\">\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">\n <title>Langfuse CrewAI Monitor</title>\n</head>\n<body>\n <h1>Hello World! Langfuse monitors your CrewAI agent!</h1>\n</body>\n</html>\n```"
}
}
],
"links": [],
"resource": {
"attributes": {
"telemetry.sdk.language": "python",
"telemetry.sdk.name": "openlit",
"telemetry.sdk.version": "1.31.1",
"service.name": "default",
"deployment.environment": "default"
},
"schema_url": ""
}
}
{
"name": "crewai.agent_execute_task",
"context": {
"trace_id": "0x9fd0006c42c1b81dd47518b00045c948",
"span_id": "0x525fce774cfca1f1",
"trace_state": "[]"
},
"kind": "SpanKind.CLIENT",
"parent_id": "0x16b8553adbfdc722",
"start_time": "2025-04-11T05:10:21.713758Z",
"end_time": "2025-04-11T05:10:22.529649Z",
"status": {
"status_code": "OK"
},
"attributes": {
"telemetry.sdk.name": "openlit",
"gen_ai.system": "crewai",
"gen_ai.operation.name": "agent",
"gen_ai.endpoint": "crewai.agent_execute_task",
"gen_ai.application_name": "default",
"gen_ai.agent.id": "f04be4a8-3e49-420a-9e1f-af81c25777d8",
"gen_ai.agent.role": "Software developer",
"gen_ai.agent.goal": "Write clear, concise code on demand",
"gen_ai.agent.context": "An expert coder with a keen eye for software trends.",
"gen_ai.agent.enable_cache": "True",
"gen_ai.agent.allow_delegation": "False",
"gen_ai.agent.allow_code_execution": "False",
"gen_ai.agent.max_retry_limit": "2",
"gen_ai.agent.tools": "[]",
"gen_ai.agent.tool_results": "[]"
},
"events": [],
"links": [],
"resource": {
"attributes": {
"telemetry.sdk.language": "python",
"telemetry.sdk.name": "openlit",
"telemetry.sdk.version": "1.31.1",
"service.name": "default",
"deployment.environment": "default"
},
"schema_url": ""
}
}
{
"name": "crewai.task_execute_core",
"context": {
"trace_id": "0x9fd0006c42c1b81dd47518b00045c948",
"span_id": "0x16b8553adbfdc722",
"trace_state": "[]"
},
"kind": "SpanKind.CLIENT",
"parent_id": null,
"start_time": "2025-04-11T05:10:21.713758Z",
"end_time": "2025-04-11T05:10:22.529649Z",
"status": {
"status_code": "OK"
},
"attributes": {
"telemetry.sdk.name": "openlit",
"gen_ai.system": "crewai",
"gen_ai.operation.name": "agent",
"gen_ai.endpoint": "crewai.task_execute_core",
"gen_ai.application_name": "default",
"gen_ai.agent.task.id": "8c1a83b4-0952-4495-95b0-156ec333dc9b",
"gen_ai.agent.task": "Define the HTML for making a simple website with heading- Hello World! Langfuse monitors your CrewAI agent!",
"gen_ai.agent.expected_output": "A clear and concise HTML code",
"gen_ai.agent.actual_output": "```\n<!DOCTYPE html>\n<html lang=\"en\">\n<head>\n <meta charset=\"UTF-8\">\n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">\n <title>Langfuse CrewAI Monitor</title>\n</head>\n<body>\n <h1>Hello World! Langfuse monitors your CrewAI agent!</h1>\n</body>\n</html>\n```",
"gen_ai.agent.human_input": "False",
"gen_ai.agent.task_associations": "{'Software developer'}"
},
"events": [],
"links": [],
"resource": {
"attributes": {
"telemetry.sdk.language": "python",
"telemetry.sdk.name": "openlit",
"telemetry.sdk.version": "1.31.1",
"service.name": "default",
"deployment.environment": "default"
},
"schema_url": ""
}
}
Hello World! Langfuse monitors your CrewAI agent!
``` { "resource_metrics": [ { "resource": { "attributes": { "telemetry.sdk.language": "python", "telemetry.sdk.name": "openlit", "telemetry.sdk.version": "1.31.1", "service.name": "default", "deployment.environment": "default" }, "schema_url": "" }, "scope_metrics": [ { "scope": { "name": "openlit.otel.metrics", "version": "0.1.0", "schema_url": "", "attributes": null }, "metrics": [ { "name": "gen_ai.total.requests", "description": "Number of requests to GenAI", "unit": "1", "data": { "data_points": [ { "attributes": { "telemetry.sdk.name": "openlit", "gen_ai.application_name": "default", "gen_ai.system": "litellm", "gen_ai.environment": "default", "gen_ai.operation.name": "chat", "gen_ai.request.model": "groq/llama-3.3-70b-versatile" }, "start_time_unix_nano": 1744348222527642400, "time_unix_nano": 1744348222535781700, "value": 1, "exemplars": [ { "filtered_attributes": {}, "value": 1, "time_unix_nano": 1744348222527642400, "span_id": 4793832649174246534, "trace_id": 212427257653678622276080119695163115848 } ] } ], "aggregation_temporality": 2, "is_monotonic": true } }, { "name": "gen_ai.usage.total_tokens", "description": "Number of total tokens processed.", "unit": "1", "data": { "data_points": [ { "attributes": { "telemetry.sdk.name": "openlit", "gen_ai.application_name": "default", "gen_ai.system": "litellm", "gen_ai.environment": "default", "gen_ai.operation.name": "chat", "gen_ai.request.model": "groq/llama-3.3-70b-versatile" }, "start_time_unix_nano": 1744348222527642400, "time_unix_nano": 1744348222535781700, "value": 309, "exemplars": [ { "filtered_attributes": {}, "value": 309, "time_unix_nano": 1744348222527642400, "span_id": 4793832649174246534, "trace_id": 212427257653678622276080119695163115848 } ] } ], "aggregation_temporality": 2, "is_monotonic": true } }, { "name": "gen_ai.usage.output_tokens", "description": "Number of completion tokens processed.", "unit": "1", "data": { "data_points": [ { "attributes": { "telemetry.sdk.name": "openlit", 
"gen_ai.application_name": "default", "gen_ai.system": "litellm", "gen_ai.environment": "default", "gen_ai.operation.name": "chat", "gen_ai.request.model": "groq/llama-3.3-70b-versatile" }, "start_time_unix_nano": 1744348222527642400, "time_unix_nano": 1744348222535781700, "value": 97, "exemplars": [ { "filtered_attributes": {}, "value": 97, "time_unix_nano": 1744348222527642400, "span_id": 4793832649174246534, "trace_id": 212427257653678622276080119695163115848 } ] } ], "aggregation_temporality": 2, "is_monotonic": true } }, { "name": "gen_ai.usage.input_tokens", "description": "Number of prompt tokens processed.", "unit": "1", "data": { "data_points": [ { "attributes": { "telemetry.sdk.name": "openlit", "gen_ai.application_name": "default", "gen_ai.system": "litellm", "gen_ai.environment": "default", "gen_ai.operation.name": "chat", "gen_ai.request.model": "groq/llama-3.3-70b-versatile" }, "start_time_unix_nano": 1744348222527642400, "time_unix_nano": 1744348222535781700, "value": 212, "exemplars": [ { "filtered_attributes": {}, "value": 212, "time_unix_nano": 1744348222527642400, "span_id": 4793832649174246534, "trace_id": 212427257653678622276080119695163115848 } ] } ], "aggregation_temporality": 2, "is_monotonic": true } }, { "name": "gen_ai.usage.cost", "description": "The distribution of GenAI request costs.", "unit": "USD", "data": { "data_points": [ { "attributes": { "telemetry.sdk.name": "openlit", "gen_ai.application_name": "default", "gen_ai.system": "litellm", "gen_ai.environment": "default", "gen_ai.operation.name": "chat", "gen_ai.request.model": "groq/llama-3.3-70b-versatile" }, "start_time_unix_nano": 1744348222528648100, "time_unix_nano": 1744348222535781700, "count": 1, "sum": 0, "bucket_counts": [ 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ], "explicit_bounds": [ 0.0, 5.0, 10.0, 25.0, 50.0, 75.0, 100.0, 250.0, 500.0, 750.0, 1000.0, 2500.0, 5000.0, 7500.0, 10000.0 ], "min": 0, "max": 0, "exemplars": [ { "filtered_attributes": {}, "value": 0, 
"time_unix_nano": 1744348222527642400, "span_id": 4793832649174246534, "trace_id": 212427257653678622276080119695163115848 } ] } ], "aggregation_temporality": 2 } } ], "schema_url": "" } ], "schema_url": "" } ] }```Possible Solution
None
Additional context
I tried with a local server too, but the same issue occurs there as well.