Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -167,7 +167,8 @@ push-test-agent: buildx-create build-kagent-adk
$(DOCKER_BUILDER) build --push $(BUILD_ARGS) $(TOOLS_IMAGE_BUILD_ARGS) -t $(DOCKER_REGISTRY)/kebab:latest -f go/test/e2e/agents/kebab/Dockerfile ./go/test/e2e/agents/kebab
kubectl apply --namespace kagent --context kind-$(KIND_CLUSTER_NAME) -f go/test/e2e/agents/kebab/agent.yaml
$(DOCKER_BUILDER) build --push $(BUILD_ARGS) $(TOOLS_IMAGE_BUILD_ARGS) -t $(DOCKER_REGISTRY)/poem-flow:latest -f python/samples/crewai/poem_flow/Dockerfile ./python

$(DOCKER_BUILDER) build --push $(BUILD_ARGS) $(TOOLS_IMAGE_BUILD_ARGS) -t $(DOCKER_REGISTRY)/basic-openai:latest -f python/samples/openai/basic_agent/Dockerfile ./python

.PHONY: push-test-skill
push-test-skill: buildx-create
echo "Building FROM DOCKER_REGISTRY=$(DOCKER_REGISTRY)/$(DOCKER_REPO)/kebab-maker:$(VERSION)"
Expand Down
106 changes: 104 additions & 2 deletions go/test/e2e/invoke_api_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -507,8 +507,57 @@ func TestE2EInvokeDeclarativeAgentWithMcpServerTool(t *testing.T) {
})
}

// This function generates a CrewAI agent that uses a mock LLM server
// Assumes that the image is built and pushed to registry, the agent can be found in python/samples/crewai/poem_flow
// This function generates an OpenAI BYO agent that uses a mock LLM server
// Assumes that the image is built and pushed to registry
func generateOpenAIAgent(baseURL string) *v1alpha2.Agent {
	// Single source of truth for the agent's identity: ObjectMeta.Name and
	// the KAGENT_NAME env var must always agree, so declare the name once.
	const agentName = "basic-openai-test-agent"

	return &v1alpha2.Agent{
		ObjectMeta: metav1.ObjectMeta{
			Name:      agentName,
			Namespace: "kagent",
		},
		Spec: v1alpha2.AgentSpec{
			Description: "A basic OpenAI agent with calculator and weather tools",
			Type:        v1alpha2.AgentType_BYO,
			BYO: &v1alpha2.BYOAgentSpec{
				Deployment: &v1alpha2.ByoDeploymentSpec{
					// Image is expected to be pre-built and pushed by the
					// push-test-agent / basic-openai-sample make targets.
					Image: "localhost:5001/basic-openai:latest",
					SharedDeploymentSpec: v1alpha2.SharedDeploymentSpec{
						Env: []corev1.EnvVar{
							{
								// Real key is never used; requests are routed to
								// the mock server via OPENAI_API_BASE below.
								Name: "OPENAI_API_KEY",
								ValueFrom: &corev1.EnvVarSource{
									SecretKeyRef: &corev1.SecretKeySelector{
										LocalObjectReference: corev1.LocalObjectReference{
											Name: "kagent-openai",
										},
										Key: "OPENAI_API_KEY",
									},
								},
							},
							{
								// Point the OpenAI SDK at the mock LLM server.
								Name:  "OPENAI_API_BASE",
								Value: baseURL + "/v1",
							},
							{
								Name:  "KAGENT_NAME",
								Value: agentName,
							},
							{
								// Resolve the namespace from the pod itself via
								// the downward API rather than hard-coding it.
								Name: "KAGENT_NAMESPACE",
								ValueFrom: &corev1.EnvVarSource{
									FieldRef: &corev1.ObjectFieldSelector{
										FieldPath: "metadata.namespace",
									},
								},
							},
						},
					},
				},
			},
		},
	}
}

func generateCrewAIAgent(baseURL string) *v1alpha2.Agent {
return &v1alpha2.Agent{
ObjectMeta: metav1.ObjectMeta{
Expand Down Expand Up @@ -547,6 +596,59 @@ func generateCrewAIAgent(baseURL string) *v1alpha2.Agent {
}
}

// TestE2EInvokeOpenAIAgent deploys the BYO OpenAI sample agent backed by a
// mock LLM server and exercises both the sync and streaming A2A invocation
// paths (calculator and weather tool round-trips).
func TestE2EInvokeOpenAIAgent(t *testing.T) {
	// Setup mock server scripting the OpenAI chat-completion exchanges.
	baseURL, stopServer := setupMockServer(t, "mocks/invoke_openai_agent.json")
	defer stopServer()

	// Setup Kubernetes client
	cli := setupK8sClient(t, false)

	// Setup specific resources
	modelCfg := setupModelConfig(t, cli, baseURL)
	agent := generateOpenAIAgent(baseURL)

	// Create the agent on the cluster
	err := cli.Create(t.Context(), agent)
	require.NoError(t, err)

	// Register cleanup immediately after creation so the agent and model
	// config are removed even if the readiness wait below fails the test.
	defer func() {
		cli.Delete(t.Context(), agent)    //nolint:errcheck
		cli.Delete(t.Context(), modelCfg) //nolint:errcheck
	}()

	// Wait for the agent to report Ready before invoking it.
	args := []string{
		"wait",
		"--for",
		"condition=Ready",
		"--timeout=1m",
		"agents.kagent.dev",
		agent.Name,
		"-n",
		agent.Namespace,
	}

	cmd := exec.CommandContext(t.Context(), "kubectl", args...)
	cmd.Stdout = os.Stdout
	cmd.Stderr = os.Stderr
	require.NoError(t, cmd.Run())

	// Setup A2A client — derive the URL from the created agent's actual
	// namespace and name so a rename stays in one place.
	a2aURL := a2aUrl(agent.Namespace, agent.Name)
	a2aClient, err := a2aclient.NewA2AClient(a2aURL)
	require.NoError(t, err)

	// Sync path: calculator tool call followed by the final completion.
	t.Run("sync_invocation_calculator", func(t *testing.T) {
		runSyncTest(t, a2aClient, "What is 2+2?", "4", nil)
	})

	// Streaming path: weather tool call followed by the final completion.
	t.Run("streaming_invocation_weather", func(t *testing.T) {
		runStreamingTest(t, a2aClient, "What is the weather in London?", "Rainy, 52°F")
	})
}

func TestE2EInvokeCrewAIAgent(t *testing.T) {
mockllmCfg, err := mockllm.LoadConfigFromFile("mocks/invoke_crewai_agent.json", mocks)
require.NoError(t, err)
Expand Down
130 changes: 130 additions & 0 deletions go/test/e2e/mocks/invoke_openai_agent.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,130 @@
{
"openai": [
{
"name": "calculate_request",
"match": {
"match_type": "contains",
"message": {
"content": "What is 2+2?",
"role": "user"
}
},
"response": {
"id": "chatcmpl-calc",
"object": "chat.completion",
"created": 1677652288,
"model": "gpt-4.1-mini",
"choices": [
{
"index": 0,
"message": {
"role": "assistant",
"content": null,
"tool_calls": [
{
"id": "call_abc123",
"type": "function",
"function": {
"name": "calculate",
"arguments": "{\"expression\": \"2+2\"}"
}
}
]
},
"finish_reason": "tool_calls"
}
]
}
},
{
"name": "calculate_result",
"match": {
"match_type": "contains",
"message": {
"content": "4",
"role": "tool",
"tool_call_id": "call_abc123"
}
},
"response": {
"id": "chatcmpl-calc-result",
"object": "chat.completion",
"created": 1677652288,
"model": "gpt-4.1-mini",
"choices": [
{
"index": 0,
"message": {
"role": "assistant",
"content": "The result of 2+2 is 4"
},
"finish_reason": "stop"
}
]
}
},
{
"name": "weather_request",
"match": {
"match_type": "contains",
"message": {
"content": "What is the weather in London?",
"role": "user"
}
},
"response": {
"id": "chatcmpl-weather",
"object": "chat.completion",
"created": 1677652289,
"model": "gpt-4.1-mini",
"choices": [
{
"index": 0,
"message": {
"role": "assistant",
"content": null,
"tool_calls": [
{
"id": "call_def456",
"type": "function",
"function": {
"name": "get_weather",
"arguments": "{\"location\": \"London\"}"
}
}
]
},
"finish_reason": "tool_calls"
}
]
}
},
{
"name": "weather_result",
"match": {
"match_type": "contains",
"message": {
"content": "Rainy, 52°F",
"role": "tool",
"tool_call_id": "call_def456"
}
},
"response": {
"id": "chatcmpl-weather-result",
"object": "chat.completion",
"created": 1677652289,
"model": "gpt-4.1-mini",
"choices": [
{
"index": 0,
"message": {
"role": "assistant",
"content": "The weather in London is Rainy, 52°F"
Copy link

Copilot AI Dec 4, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Inconsistent spelling: "london" should be capitalized to "London" to match the input and be consistent with the rest of the response. The weather data uses "london" as the key (line 58) which is fine for internal lookup, but the response should preserve the proper capitalization.

Change line 122 from:

"content": "The weather in london is Rainy, 52°F"

to:

"content": "The weather in London is Rainy, 52°F"
Suggested change
"content": "The weather in london is Rainy, 52°F"
"content": "The weather in London is Rainy, 52°F"

Copilot uses AI. Check for mistakes.
},
"finish_reason": "stop"
}
]
}
}
]
}
8 changes: 6 additions & 2 deletions python/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -101,6 +101,7 @@ COPY --chown=python:pythongroup .python-version .
COPY --chown=python:pythongroup uv.lock .
COPY --chown=python:pythongroup packages/kagent-adk packages/kagent-adk
COPY --chown=python:pythongroup packages/kagent-core packages/kagent-core
COPY --chown=python:pythongroup packages/kagent-skills packages/kagent-skills
COPY --chown=python:pythongroup README.md .

ARG VERSION
Expand All @@ -114,15 +115,18 @@ RUN --mount=type=cache,target=/.kagent/cache,uid=1001,gid=1001 \
&& echo "Installation complete."

# Create a separate venv for bash tool commands (sandbox environment)
# This venv does not have pip installed
Comment on lines 117 to +118
Copy link

Copilot AI Dec 4, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The comment on line 118 states "This venv does not have pip installed" but this is misleading. The bash tool description in prompts.py mentions "pip install" with a 120s timeout, and the _get_command_timeout_seconds function specifically handles "pip install" commands. If pip is not installed in the sandbox venv, then pip install commands will fail, making the timeout configuration useless.

Either:

  1. Install pip in the sandbox venv, OR
  2. Remove pip-related timeout logic and documentation if pip is intentionally excluded

Copilot uses AI. Check for mistakes.
RUN --mount=type=cache,target=/.kagent/cache,uid=1001,gid=1001 \
echo "Creating bash tool sandbox environment..." \
&& mkdir -p /.kagent/sandbox-venv \
&& uv venv --python=python$TOOLS_PYTHON_VERSION /.kagent/sandbox-venv \
&& echo "Bash tool sandbox environment created."

ENV PATH="/.kagent/.venv/bin:$PATH"
ENV UV_PROJECT_ENVIRONMENT=/app/.venv
ENV BASH_VENV_PATH="/.kagent/sandbox-venv"
ENV PYTHONPATH="/.kagent:/"
Copy link

Copilot AI Dec 4, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Inconsistent environment variable naming: The Dockerfile uses PYTHONPATH=/.kagent:/ (line 126) which adds /.kagent to the path, but the kagent-skills shell.py adds the working directory and /skills to PYTHONPATH (line 124). This could lead to import confusion.

The /.kagent path in the Dockerfile doesn't seem to be referenced anywhere else in the codebase, and it's unclear what should be imported from there. Consider documenting why /.kagent needs to be on PYTHONPATH or removing it if it's not necessary.

Suggested change
ENV PYTHONPATH="/.kagent:/"

Copilot uses AI. Check for mistakes.
ENV UV_PROJECT_ENVIRONMENT=/.kagent/.venv
ENV BASH_VENV_PATH=/.kagent/sandbox-venv
ENV VIRTUAL_ENV=/.kagent/.venv

WORKDIR /app

Expand Down
4 changes: 4 additions & 0 deletions python/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -73,3 +73,7 @@ generate-test-certs:
rm -f server-extensions.conf server-req.pem && \
echo "Test certificates generated successfully"; \
fi

.PHONY: basic-openai-sample
basic-openai-sample:
docker build . -f samples/openai/basic_agent/Dockerfile --tag localhost:5001/basic-openai:latest --push
2 changes: 2 additions & 0 deletions python/packages/kagent-adk/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ dependencies = [
"agentsts-adk >= 0.0.6",
"agentsts-core >= 0.0.6",
"kagent-core",
"kagent-skills",
"aiofiles>=24.1.0",
"anyio>=4.9.0",
"typer>=0.15.0",
Expand All @@ -34,6 +35,7 @@ dependencies = [

[tool.uv.sources]
kagent-core = {workspace = true}
kagent-skills = {workspace = true}

[project.scripts]
kagent-adk = "kagent.adk.cli:run_cli"
Expand Down
2 changes: 1 addition & 1 deletion python/packages/kagent-adk/src/kagent/adk/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@

from . import AgentConfig, KAgentApp
from .skill_fetcher import fetch_skill
from .skills.skills_plugin import SkillsPlugin
from .tools.skills_plugin import SkillsPlugin

logger = logging.getLogger(__name__)
logging.getLogger("google_adk.google.adk.tools.base_authenticated_tool").setLevel(logging.ERROR)
Expand Down
10 changes: 0 additions & 10 deletions python/packages/kagent-adk/src/kagent/adk/skills/__init__.py

This file was deleted.

Loading
Loading