diff --git a/Makefile b/Makefile
index ab2ba5e84..5af603e3d 100644
--- a/Makefile
+++ b/Makefile
@@ -167,7 +167,8 @@ push-test-agent: buildx-create build-kagent-adk
$(DOCKER_BUILDER) build --push $(BUILD_ARGS) $(TOOLS_IMAGE_BUILD_ARGS) -t $(DOCKER_REGISTRY)/kebab:latest -f go/test/e2e/agents/kebab/Dockerfile ./go/test/e2e/agents/kebab
kubectl apply --namespace kagent --context kind-$(KIND_CLUSTER_NAME) -f go/test/e2e/agents/kebab/agent.yaml
$(DOCKER_BUILDER) build --push $(BUILD_ARGS) $(TOOLS_IMAGE_BUILD_ARGS) -t $(DOCKER_REGISTRY)/poem-flow:latest -f python/samples/crewai/poem_flow/Dockerfile ./python
-
+ $(DOCKER_BUILDER) build --push $(BUILD_ARGS) $(TOOLS_IMAGE_BUILD_ARGS) -t $(DOCKER_REGISTRY)/basic-openai:latest -f python/samples/openai/basic_agent/Dockerfile ./python
+
.PHONY: push-test-skill
push-test-skill: buildx-create
echo "Building FROM DOCKER_REGISTRY=$(DOCKER_REGISTRY)/$(DOCKER_REPO)/kebab-maker:$(VERSION)"
diff --git a/go/test/e2e/invoke_api_test.go b/go/test/e2e/invoke_api_test.go
index 38e88abb0..859063d28 100644
--- a/go/test/e2e/invoke_api_test.go
+++ b/go/test/e2e/invoke_api_test.go
@@ -501,8 +501,57 @@ func TestE2EInvokeDeclarativeAgentWithMcpServerTool(t *testing.T) {
})
}
-// This function generates a CrewAI agent that uses a mock LLM server
-// Assumes that the image is built and pushed to registry, the agent can be found in python/samples/crewai/poem_flow
+// This function generates an OpenAI BYO agent that uses a mock LLM server
+// Assumes that the image is built and pushed to registry
+func generateOpenAIAgent(baseURL string) *v1alpha2.Agent {
+ return &v1alpha2.Agent{
+ ObjectMeta: metav1.ObjectMeta{
+ Name: "basic-openai-test-agent",
+ Namespace: "kagent",
+ },
+ Spec: v1alpha2.AgentSpec{
+ Description: "A basic OpenAI agent with calculator and weather tools",
+ Type: v1alpha2.AgentType_BYO,
+ BYO: &v1alpha2.BYOAgentSpec{
+ Deployment: &v1alpha2.ByoDeploymentSpec{
+ Image: "localhost:5001/basic-openai:latest",
+ SharedDeploymentSpec: v1alpha2.SharedDeploymentSpec{
+ Env: []corev1.EnvVar{
+ {
+ Name: "OPENAI_API_KEY",
+ ValueFrom: &corev1.EnvVarSource{
+ SecretKeyRef: &corev1.SecretKeySelector{
+ LocalObjectReference: corev1.LocalObjectReference{
+ Name: "kagent-openai",
+ },
+ Key: "OPENAI_API_KEY",
+ },
+ },
+ },
+ {
+ Name: "OPENAI_API_BASE",
+ Value: baseURL + "/v1",
+ },
+ {
+ Name: "KAGENT_NAME",
+ Value: "basic-openai-test-agent",
+ },
+ {
+ Name: "KAGENT_NAMESPACE",
+ ValueFrom: &corev1.EnvVarSource{
+ FieldRef: &corev1.ObjectFieldSelector{
+ FieldPath: "metadata.namespace",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ }
+}
+
func generateCrewAIAgent(baseURL string) *v1alpha2.Agent {
return &v1alpha2.Agent{
ObjectMeta: metav1.ObjectMeta{
@@ -541,6 +590,59 @@ func generateCrewAIAgent(baseURL string) *v1alpha2.Agent {
}
}
+func TestE2EInvokeOpenAIAgent(t *testing.T) {
+ // Setup mock server
+ baseURL, stopServer := setupMockServer(t, "mocks/invoke_openai_agent.json")
+ defer stopServer()
+
+ // Setup Kubernetes client
+ cli := setupK8sClient(t, false)
+
+ // Setup specific resources
+ modelCfg := setupModelConfig(t, cli, baseURL)
+ agent := generateOpenAIAgent(baseURL)
+
+ // Create the agent on the cluster
+ err := cli.Create(t.Context(), agent)
+ require.NoError(t, err)
+
+ // Wait for agent to be ready
+ args := []string{
+ "wait",
+ "--for",
+ "condition=Ready",
+ "--timeout=1m",
+ "agents.kagent.dev",
+ agent.Name,
+ "-n",
+ agent.Namespace,
+ }
+
+	// Register cleanup before waiting so the resources are deleted even if the wait fails.
+	defer func() {
+		cli.Delete(t.Context(), agent)    //nolint:errcheck
+		cli.Delete(t.Context(), modelCfg) //nolint:errcheck
+	}()
+
+	cmd := exec.CommandContext(t.Context(), "kubectl", args...)
+	cmd.Stdout = os.Stdout
+	cmd.Stderr = os.Stderr
+	require.NoError(t, cmd.Run())
+
+ // Setup A2A client - use the agent's actual name
+	a2aURL := a2aUrl(agent.Namespace, agent.Name)
+ a2aClient, err := a2aclient.NewA2AClient(a2aURL)
+ require.NoError(t, err)
+
+ // Run tests
+ t.Run("sync_invocation_calculator", func(t *testing.T) {
+ runSyncTest(t, a2aClient, "What is 2+2?", "4", nil)
+ })
+
+ t.Run("streaming_invocation_weather", func(t *testing.T) {
+ runStreamingTest(t, a2aClient, "What is the weather in London?", "Rainy, 52°F")
+ })
+}
+
func TestE2EInvokeCrewAIAgent(t *testing.T) {
mockllmCfg, err := mockllm.LoadConfigFromFile("mocks/invoke_crewai_agent.json", mocks)
require.NoError(t, err)
diff --git a/go/test/e2e/mocks/invoke_openai_agent.json b/go/test/e2e/mocks/invoke_openai_agent.json
new file mode 100644
index 000000000..128aea9ab
--- /dev/null
+++ b/go/test/e2e/mocks/invoke_openai_agent.json
@@ -0,0 +1,130 @@
+{
+ "openai": [
+ {
+ "name": "calculate_request",
+ "match": {
+ "match_type": "contains",
+ "message": {
+ "content": "What is 2+2?",
+ "role": "user"
+ }
+ },
+ "response": {
+ "id": "chatcmpl-calc",
+ "object": "chat.completion",
+ "created": 1677652288,
+ "model": "gpt-4.1-mini",
+ "choices": [
+ {
+ "index": 0,
+ "message": {
+ "role": "assistant",
+ "content": null,
+ "tool_calls": [
+ {
+ "id": "call_abc123",
+ "type": "function",
+ "function": {
+ "name": "calculate",
+ "arguments": "{\"expression\": \"2+2\"}"
+ }
+ }
+ ]
+ },
+ "finish_reason": "tool_calls"
+ }
+ ]
+ }
+ },
+ {
+ "name": "calculate_result",
+ "match": {
+ "match_type": "contains",
+ "message": {
+ "content": "4",
+ "role": "tool",
+ "tool_call_id": "call_abc123"
+ }
+ },
+ "response": {
+ "id": "chatcmpl-calc-result",
+ "object": "chat.completion",
+ "created": 1677652288,
+ "model": "gpt-4.1-mini",
+ "choices": [
+ {
+ "index": 0,
+ "message": {
+ "role": "assistant",
+ "content": "The result of 2+2 is 4"
+ },
+ "finish_reason": "stop"
+ }
+ ]
+ }
+ },
+ {
+ "name": "weather_request",
+ "match": {
+ "match_type": "contains",
+ "message": {
+ "content": "What is the weather in London?",
+ "role": "user"
+ }
+ },
+ "response": {
+ "id": "chatcmpl-weather",
+ "object": "chat.completion",
+ "created": 1677652289,
+ "model": "gpt-4.1-mini",
+ "choices": [
+ {
+ "index": 0,
+ "message": {
+ "role": "assistant",
+ "content": null,
+ "tool_calls": [
+ {
+ "id": "call_def456",
+ "type": "function",
+ "function": {
+ "name": "get_weather",
+ "arguments": "{\"location\": \"London\"}"
+ }
+ }
+ ]
+ },
+ "finish_reason": "tool_calls"
+ }
+ ]
+ }
+ },
+ {
+ "name": "weather_result",
+ "match": {
+ "match_type": "contains",
+ "message": {
+ "content": "Rainy, 52°F",
+ "role": "tool",
+ "tool_call_id": "call_def456"
+ }
+ },
+ "response": {
+ "id": "chatcmpl-weather-result",
+ "object": "chat.completion",
+ "created": 1677652289,
+ "model": "gpt-4.1-mini",
+ "choices": [
+ {
+ "index": 0,
+ "message": {
+ "role": "assistant",
+ "content": "The weather in London is Rainy, 52°F"
+ },
+ "finish_reason": "stop"
+ }
+ ]
+ }
+ }
+ ]
+}
diff --git a/python/Dockerfile b/python/Dockerfile
index a0ff7a4de..cae8face9 100644
--- a/python/Dockerfile
+++ b/python/Dockerfile
@@ -101,6 +101,7 @@ COPY --chown=python:pythongroup .python-version .
COPY --chown=python:pythongroup uv.lock .
COPY --chown=python:pythongroup packages/kagent-adk packages/kagent-adk
COPY --chown=python:pythongroup packages/kagent-core packages/kagent-core
+COPY --chown=python:pythongroup packages/kagent-skills packages/kagent-skills
COPY --chown=python:pythongroup README.md .
ARG VERSION
@@ -114,6 +115,7 @@ RUN --mount=type=cache,target=/.kagent/cache,uid=1001,gid=1001 \
&& echo "Installation complete."
# Create a separate venv for bash tool commands (sandbox environment)
+# This venv does not have pip installed
RUN --mount=type=cache,target=/.kagent/cache,uid=1001,gid=1001 \
echo "Creating bash tool sandbox environment..." \
&& mkdir -p /.kagent/sandbox-venv \
@@ -122,7 +124,8 @@ RUN --mount=type=cache,target=/.kagent/cache,uid=1001,gid=1001 \
ENV PATH="/.kagent/.venv/bin:$PATH"
ENV UV_PROJECT_ENVIRONMENT=/app/.venv
-ENV BASH_VENV_PATH="/.kagent/sandbox-venv"
+ENV BASH_VENV_PATH=/.kagent/sandbox-venv
+ENV VIRTUAL_ENV=/.kagent/.venv
WORKDIR /app
diff --git a/python/Makefile b/python/Makefile
index 982d82441..d12e08e4a 100644
--- a/python/Makefile
+++ b/python/Makefile
@@ -73,3 +73,7 @@ generate-test-certs:
rm -f server-extensions.conf server-req.pem && \
echo "Test certificates generated successfully"; \
fi
+
+.PHONY: basic-openai-sample
+basic-openai-sample:
+ docker build . -f samples/openai/basic_agent/Dockerfile --tag localhost:5001/basic-openai:latest --push
diff --git a/python/packages/kagent-adk/pyproject.toml b/python/packages/kagent-adk/pyproject.toml
index 7daa18f49..fffeee1f0 100644
--- a/python/packages/kagent-adk/pyproject.toml
+++ b/python/packages/kagent-adk/pyproject.toml
@@ -12,6 +12,7 @@ dependencies = [
"agentsts-adk >= 0.0.8",
"agentsts-core >= 0.0.8",
"kagent-core",
+ "kagent-skills",
"aiofiles>=24.1.0",
"anyio>=4.9.0",
"typer>=0.15.0",
@@ -34,6 +35,7 @@ dependencies = [
[tool.uv.sources]
kagent-core = {workspace = true}
+kagent-skills = {workspace = true}
[project.scripts]
kagent-adk = "kagent.adk.cli:run_cli"
diff --git a/python/packages/kagent-adk/src/kagent/adk/cli.py b/python/packages/kagent-adk/src/kagent/adk/cli.py
index 90a640920..d447066e0 100644
--- a/python/packages/kagent-adk/src/kagent/adk/cli.py
+++ b/python/packages/kagent-adk/src/kagent/adk/cli.py
@@ -16,7 +16,7 @@
from . import AgentConfig, KAgentApp
from .skill_fetcher import fetch_skill
-from .skills.skills_plugin import add_skills_tool_to_agent
+from .tools import add_skills_tool_to_agent
logger = logging.getLogger(__name__)
logging.getLogger("google_adk.google.adk.tools.base_authenticated_tool").setLevel(logging.ERROR)
diff --git a/python/packages/kagent-adk/src/kagent/adk/skills/__init__.py b/python/packages/kagent-adk/src/kagent/adk/skills/__init__.py
deleted file mode 100644
index 9b20ff650..000000000
--- a/python/packages/kagent-adk/src/kagent/adk/skills/__init__.py
+++ /dev/null
@@ -1,8 +0,0 @@
-from .skill_tool import SkillsTool
-from .skills_toolset import SkillsToolset
-
-__all__ = [
- "SkillsTool",
- "SkillsToolset",
- "generate_shell_skills_system_prompt",
-]
diff --git a/python/packages/kagent-adk/src/kagent/adk/skills/skill_tool.py b/python/packages/kagent-adk/src/kagent/adk/skills/skill_tool.py
deleted file mode 100644
index 56852e7ed..000000000
--- a/python/packages/kagent-adk/src/kagent/adk/skills/skill_tool.py
+++ /dev/null
@@ -1,203 +0,0 @@
-"""Tool for discovering and loading skills."""
-
-from __future__ import annotations
-
-import logging
-from pathlib import Path
-from typing import Any, Dict, Optional
-
-import yaml
-from google.adk.tools import BaseTool, ToolContext
-from google.genai import types
-from pydantic import BaseModel
-
-logger = logging.getLogger("kagent_adk." + __name__)
-
-
-class Skill(BaseModel):
- """Represents the metadata for a skill.
-
- This is a simple data container used during the initial skill discovery
- phase to hold the information parsed from a skill's SKILL.md frontmatter.
- """
-
- name: str
- """The unique name/identifier of the skill."""
-
- description: str
- """A description of what the skill does and when to use it."""
-
- license: Optional[str] = None
- """Optional license information for the skill."""
-
-
-class SkillsTool(BaseTool):
- """Discover and load skill instructions.
-
- This tool dynamically discovers available skills and embeds their metadata in the
- tool description. Agent invokes a skill by name to load its full instructions.
- """
-
- def __init__(self, skills_directory: str | Path):
- self.skills_directory = Path(skills_directory).resolve()
- if not self.skills_directory.exists():
- raise ValueError(f"Skills directory does not exist: {self.skills_directory}")
-
- self._skill_cache: Dict[str, str] = {}
-
- # Generate description with available skills embedded
- description = self._generate_description_with_skills()
-
- super().__init__(
- name="skills",
- description=description,
- )
-
- def _generate_description_with_skills(self) -> str:
- """Generate tool description with available skills embedded."""
- base_description = (
- "Execute a skill within the main conversation\n\n"
- "\n"
- "When users ask you to perform tasks, check if any of the available skills below can help "
- "complete the task more effectively. Skills provide specialized capabilities and domain knowledge.\n\n"
- "How to use skills:\n"
- "- Invoke skills using this tool with the skill name only (no arguments)\n"
- "- When you invoke a skill, the skill's full SKILL.md will load with detailed instructions\n"
- "- Follow the skill's instructions and use the bash tool to execute commands\n"
- "- Examples:\n"
- ' - command: "data-analysis" - invoke the data-analysis skill\n'
- ' - command: "pdf-processing" - invoke the pdf-processing skill\n\n'
- "Important:\n"
- "- Only use skills listed in below\n"
- "- Do not invoke a skill that is already loaded in the conversation\n"
- "- After loading a skill, use the bash tool for execution\n"
- "- If not specified, scripts are located in the skill-name/scripts subdirectory\n"
- "\n\n"
- )
-
- # Discover and append available skills
- skills_xml = self._discover_skills()
- return base_description + skills_xml
-
- def _discover_skills(self) -> str:
- """Discover available skills and format as XML."""
- if not self.skills_directory.exists():
- return "\n\n\n"
-
- skills_entries = []
- for skill_dir in sorted(self.skills_directory.iterdir()):
- if not skill_dir.is_dir():
- continue
-
- skill_file = skill_dir / "SKILL.md"
- if not skill_file.exists():
- continue
-
- try:
- metadata = self._parse_skill_metadata(skill_file)
- if metadata:
- skill_xml = (
- "\n"
- f"{metadata['name']}\n"
- f"{metadata['description']}\n"
- ""
- )
- skills_entries.append(skill_xml)
- except Exception as e:
- logger.error(f"Failed to parse skill {skill_dir.name}: {e}")
-
- if not skills_entries:
- return "\n\n\n"
-
- return "\n" + "\n".join(skills_entries) + "\n\n"
-
- def _get_declaration(self) -> types.FunctionDeclaration:
- return types.FunctionDeclaration(
- name=self.name,
- description=self.description,
- parameters=types.Schema(
- type=types.Type.OBJECT,
- properties={
- "command": types.Schema(
- type=types.Type.STRING,
- description='The skill name (no arguments). E.g., "data-analysis" or "pdf-processing"',
- ),
- },
- required=["command"],
- ),
- )
-
- async def run_async(self, *, args: Dict[str, Any], tool_context: ToolContext) -> str:
- """Execute skill loading by name."""
- skill_name = args.get("command", "").strip()
-
- if not skill_name:
- return "Error: No skill name provided"
-
- return self._invoke_skill(skill_name)
-
- def _invoke_skill(self, skill_name: str) -> str:
- """Load and return the full content of a skill."""
- # Check cache first
- if skill_name in self._skill_cache:
- return self._skill_cache[skill_name]
-
- # Find skill directory
- skill_dir = self.skills_directory / skill_name
- if not skill_dir.exists() or not skill_dir.is_dir():
- return f"Error: Skill '{skill_name}' not found. Check the available skills list in the tool description."
-
- skill_file = skill_dir / "SKILL.md"
- if not skill_file.exists():
- return f"Error: Skill '{skill_name}' has no SKILL.md file."
-
- try:
- with open(skill_file, "r", encoding="utf-8") as f:
- content = f.read()
-
- formatted_content = self._format_skill_content(skill_name, content)
-
- # Cache the formatted content
- self._skill_cache[skill_name] = formatted_content
-
- return formatted_content
-
- except Exception as e:
- logger.error(f"Failed to load skill {skill_name}: {e}")
- return f"Error loading skill '{skill_name}': {e}"
-
- def _parse_skill_metadata(self, skill_file: Path) -> Dict[str, str] | None:
- """Parse YAML frontmatter from a SKILL.md file."""
- try:
- with open(skill_file, "r", encoding="utf-8") as f:
- content = f.read()
-
- if not content.startswith("---"):
- return None
-
- parts = content.split("---", 2)
- if len(parts) < 3:
- return None
-
- metadata = yaml.safe_load(parts[1])
- if isinstance(metadata, dict) and "name" in metadata and "description" in metadata:
- return {
- "name": metadata["name"],
- "description": metadata["description"],
- }
- return None
- except Exception as e:
- logger.error(f"Failed to parse metadata from {skill_file}: {e}")
- return None
-
- def _format_skill_content(self, skill_name: str, content: str) -> str:
- """Format skill content for display to the agent."""
- header = (
- f'The "{skill_name}" skill is loading\n\n'
- f"Base directory for this skill: {self.skills_directory}/{skill_name}\n\n"
- )
- footer = (
- "\n\n---\n"
- "The skill has been loaded. Follow the instructions above and use the bash tool to execute commands."
- )
- return header + content + footer
diff --git a/python/packages/kagent-adk/src/kagent/adk/skills/README.md b/python/packages/kagent-adk/src/kagent/adk/tools/README.md
similarity index 100%
rename from python/packages/kagent-adk/src/kagent/adk/skills/README.md
rename to python/packages/kagent-adk/src/kagent/adk/tools/README.md
diff --git a/python/packages/kagent-adk/src/kagent/adk/tools/__init__.py b/python/packages/kagent-adk/src/kagent/adk/tools/__init__.py
index e844c4b7d..062f17e8e 100644
--- a/python/packages/kagent-adk/src/kagent/adk/tools/__init__.py
+++ b/python/packages/kagent-adk/src/kagent/adk/tools/__init__.py
@@ -1,9 +1,15 @@
from .bash_tool import BashTool
from .file_tools import EditFileTool, ReadFileTool, WriteFileTool
+from .skill_tool import SkillsTool
+from .skills_plugin import add_skills_tool_to_agent
+from .skills_toolset import SkillsToolset
__all__ = [
+ "SkillsTool",
+ "SkillsToolset",
"BashTool",
"EditFileTool",
"ReadFileTool",
"WriteFileTool",
+ "add_skills_tool_to_agent",
]
diff --git a/python/packages/kagent-adk/src/kagent/adk/tools/bash_tool.py b/python/packages/kagent-adk/src/kagent/adk/tools/bash_tool.py
index 7c23c389a..75d2f60e9 100644
--- a/python/packages/kagent-adk/src/kagent/adk/tools/bash_tool.py
+++ b/python/packages/kagent-adk/src/kagent/adk/tools/bash_tool.py
@@ -2,16 +2,13 @@
from __future__ import annotations
-import asyncio
import logging
-import os
from pathlib import Path
from typing import Any, Dict
from google.adk.tools import BaseTool, ToolContext
from google.genai import types
-
-from ..artifacts import get_session_path
+from kagent.skills import execute_command, get_bash_description, get_session_path
logger = logging.getLogger("kagent_adk." + __name__)
@@ -31,26 +28,7 @@ class BashTool(BaseTool):
def __init__(self, skills_directory: str | Path):
super().__init__(
name="bash",
- description=(
- "Execute bash commands in the skills environment with sandbox protection.\n\n"
- "Working Directory & Structure:\n"
- "- Commands run in a temporary session directory: /tmp/kagent/{session_id}/\n"
- "- /skills -> All skills are available here (read-only).\n"
- "- Your current working directory is added to PYTHONPATH.\n\n"
- "Python Imports (CRITICAL):\n"
- "- To import from a skill, use the full path from the 'skills' root.\n"
- " Example: from skills.skills_name.module import function\n\n"
- "- If the skills name contains a dash '-', you need to use importlib to import it.\n"
- " Example:\n"
- " import importlib\n"
- " skill_module = importlib.import_module('skills.skill-name.module')\n\n"
- "For file operations:\n"
- "- Use read_file, write_file, and edit_file for interacting with the filesystem.\n\n"
- "Timeouts:\n"
- "- pip install: 120s\n"
- "- python scripts: 60s\n"
- "- other commands: 30s\n"
- ),
+ description=get_bash_description(),
)
self.skills_directory = Path(skills_directory).resolve()
if not self.skills_directory.exists():
@@ -85,99 +63,11 @@ async def run_async(self, *, args: Dict[str, Any], tool_context: ToolContext) ->
return "Error: No command provided"
try:
- result = await self._execute_command_with_srt(command, tool_context)
+ working_dir = get_session_path(session_id=tool_context.session.id)
+ result = await execute_command(command, working_dir)
logger.info(f"Executed bash command: {command}, description: {description}")
return result
except Exception as e:
error_msg = f"Error executing command '{command}': {e}"
logger.error(error_msg)
return error_msg
-
- async def _execute_command_with_srt(self, command: str, tool_context: ToolContext) -> str:
- """Execute a bash command safely using the Anthropic Sandbox Runtime.
-
- The srt (Sandbox Runtime) wraps the command in a secure sandbox that enforces
- filesystem and network restrictions at the OS level.
-
- The working directory is a temporary session path, which contains:
- - uploads/: staged user files
- - outputs/: location for generated files
- The /skills directory is available at the root and on the PYTHONPATH.
- """
- # Get session working directory (initialized by SkillsPlugin)
- working_dir = get_session_path(session_id=tool_context.session.id)
-
- # Determine timeout based on command
- timeout = self._get_command_timeout_seconds(command)
-
- # Prepare environment with PYTHONPATH including skills directory
- # This allows imports like: from skills.slack_gif_creator.core import something
- env = os.environ.copy()
- # Add root for 'from skills...' and working_dir for local scripts
- pythonpath_additions = [str(working_dir), "/"]
- if "PYTHONPATH" in env:
- pythonpath_additions.append(env["PYTHONPATH"])
- env["PYTHONPATH"] = ":".join(pythonpath_additions)
-
- # Use the separate bash tool venv instead of the app's venv
- bash_venv_path = os.environ.get("BASH_VENV_PATH", "/.kagent/sandbox-venv")
- bash_venv_bin = os.path.join(bash_venv_path, "bin")
- # Prepend bash venv to PATH so its python and pip are used
- env["PATH"] = f"{bash_venv_bin}:{env.get('PATH', '')}"
- env["VIRTUAL_ENV"] = bash_venv_path
-
- # Execute with sandbox runtime
- sandboxed_command = f'srt "{command}"'
-
- try:
- process = await asyncio.create_subprocess_shell(
- sandboxed_command,
- stdout=asyncio.subprocess.PIPE,
- stderr=asyncio.subprocess.PIPE,
- cwd=working_dir,
- env=env, # Pass the modified environment
- )
-
- try:
- stdout, stderr = await asyncio.wait_for(process.communicate(), timeout=timeout)
- except asyncio.TimeoutError:
- process.kill()
- await process.wait()
- return f"Error: Command timed out after {timeout}s"
-
- stdout_str = stdout.decode("utf-8", errors="replace") if stdout else ""
- stderr_str = stderr.decode("utf-8", errors="replace") if stderr else ""
-
- # Handle command failure
- if process.returncode != 0:
- error_msg = f"Command failed with exit code {process.returncode}"
- if stderr_str:
- error_msg += f":\n{stderr_str}"
- elif stdout_str:
- error_msg += f":\n{stdout_str}"
- return error_msg
-
- # Return output
- output = stdout_str
- if stderr_str and "WARNING" not in stderr_str:
- output += f"\n{stderr_str}"
-
- return output.strip() if output.strip() else "Command completed successfully."
-
- except Exception as e:
- logger.error(f"Error executing command: {e}")
- return f"Error: {e}"
-
- def _get_command_timeout_seconds(self, command: str) -> float:
- """Determine appropriate timeout for command in seconds.
-
- Based on the command string, determine the timeout. srt timeout is in milliseconds,
- so we return seconds for asyncio compatibility.
- """
- # Check for keywords in the command to determine timeout
- if "pip install" in command or "pip3 install" in command:
- return 120.0 # 2 minutes for package installations
- elif "python " in command or "python3 " in command:
- return 60.0 # 1 minute for python scripts
- else:
- return 30.0 # 30 seconds for other commands
diff --git a/python/packages/kagent-adk/src/kagent/adk/tools/file_tools.py b/python/packages/kagent-adk/src/kagent/adk/tools/file_tools.py
index a6675e9e3..b8c1053aa 100644
--- a/python/packages/kagent-adk/src/kagent/adk/tools/file_tools.py
+++ b/python/packages/kagent-adk/src/kagent/adk/tools/file_tools.py
@@ -12,8 +12,15 @@
from google.adk.tools import BaseTool, ToolContext
from google.genai import types
-
-from ..artifacts import get_session_path
+from kagent.skills import (
+ edit_file_content,
+ get_edit_file_description,
+ get_read_file_description,
+ get_session_path,
+ get_write_file_description,
+ read_file_content,
+ write_file_content,
+)
logger = logging.getLogger("kagent_adk." + __name__)
@@ -24,16 +31,7 @@ class ReadFileTool(BaseTool):
def __init__(self):
super().__init__(
name="read_file",
- description=(
- "Reads a file from the filesystem with line numbers.\n\n"
- "Usage:\n"
- "- Provide a path to the file (absolute or relative to your working directory)\n"
- "- Returns content with line numbers (format: LINE_NUMBER|CONTENT)\n"
- "- Optional offset and limit parameters for reading specific line ranges\n"
- "- Lines longer than 2000 characters are truncated\n"
- "- Always read a file before editing it\n"
- "- You can read from skills/ directory, uploads/, outputs/, or any file in your session\n"
- ),
+ description=get_read_file_description(),
)
def _get_declaration(self) -> types.FunctionDeclaration:
@@ -62,47 +60,23 @@ def _get_declaration(self) -> types.FunctionDeclaration:
async def run_async(self, *, args: Dict[str, Any], tool_context: ToolContext) -> str:
"""Read a file with line numbers."""
- file_path = args.get("file_path", "").strip()
+ file_path_str = args.get("file_path", "").strip()
offset = args.get("offset")
limit = args.get("limit")
- if not file_path:
+ if not file_path_str:
return "Error: No file path provided"
- # Resolve path relative to session working directory
- working_dir = get_session_path(session_id=tool_context.session.id)
- path = Path(file_path)
- if not path.is_absolute():
- path = working_dir / path
- path = path.resolve()
-
- if not path.exists():
- return f"Error: File not found: {file_path}"
-
- if not path.is_file():
- return f"Error: Path is not a file: {file_path}\nThis tool can only read files, not directories."
-
try:
- lines = path.read_text().splitlines()
- except Exception as e:
- return f"Error reading file {file_path}: {e}"
-
- # Handle offset and limit
- start = (offset - 1) if offset and offset > 0 else 0
- end = (start + limit) if limit else len(lines)
+ working_dir = get_session_path(session_id=tool_context.session.id)
+ path = Path(file_path_str)
+ if not path.is_absolute():
+ path = working_dir / path
+ path = path.resolve()
- # Format with line numbers
- result_lines = []
- for i, line in enumerate(lines[start:end], start=start + 1):
- # Truncate long lines
- if len(line) > 2000:
- line = line[:2000] + "..."
- result_lines.append(f"{i:6d}|{line}")
-
- if not result_lines:
- return "File is empty."
-
- return "\n".join(result_lines)
+ return read_file_content(path, offset, limit)
+ except (FileNotFoundError, IsADirectoryError, IOError) as e:
+ return f"Error reading file {file_path_str}: {e}"
class WriteFileTool(BaseTool):
@@ -111,17 +85,7 @@ class WriteFileTool(BaseTool):
def __init__(self):
super().__init__(
name="write_file",
- description=(
- "Writes content to a file on the filesystem.\n\n"
- "Usage:\n"
- "- Provide a path (absolute or relative to working directory) and content to write\n"
- "- Overwrites existing files\n"
- "- Creates parent directories if needed\n"
- "- For existing files, read them first using read_file\n"
- "- Prefer editing existing files over writing new ones\n"
- "- You can write to your working directory, outputs/, or any writable location\n"
- "- Note: skills/ directory is read-only\n"
- ),
+ description=get_write_file_description(),
)
def _get_declaration(self) -> types.FunctionDeclaration:
@@ -146,27 +110,22 @@ def _get_declaration(self) -> types.FunctionDeclaration:
async def run_async(self, *, args: Dict[str, Any], tool_context: ToolContext) -> str:
"""Write content to a file."""
- file_path = args.get("file_path", "").strip()
+ file_path_str = args.get("file_path", "").strip()
content = args.get("content", "")
- if not file_path:
+ if not file_path_str:
return "Error: No file path provided"
- # Resolve path relative to session working directory
- working_dir = get_session_path(session_id=tool_context.session.id)
- path = Path(file_path)
- if not path.is_absolute():
- path = working_dir / path
- path = path.resolve()
-
try:
- # Create parent directories if needed
- path.parent.mkdir(parents=True, exist_ok=True)
- path.write_text(content)
- logger.info(f"Successfully wrote to {file_path}")
- return f"Successfully wrote to {file_path}"
- except Exception as e:
- error_msg = f"Error writing file {file_path}: {e}"
+ working_dir = get_session_path(session_id=tool_context.session.id)
+ path = Path(file_path_str)
+ if not path.is_absolute():
+ path = working_dir / path
+ path = path.resolve()
+
+ return write_file_content(path, content)
+ except IOError as e:
+ error_msg = f"Error writing file {file_path_str}: {e}"
logger.error(error_msg)
return error_msg
@@ -177,18 +136,7 @@ class EditFileTool(BaseTool):
def __init__(self):
super().__init__(
name="edit_file",
- description=(
- "Performs exact string replacements in files.\n\n"
- "Usage:\n"
- "- You must read the file first using read_file\n"
- "- Provide path (absolute or relative to working directory)\n"
- "- When editing, preserve exact indentation from the file content\n"
- "- Do NOT include line number prefixes in old_string or new_string\n"
- "- old_string must be unique unless replace_all=true\n"
- "- Use replace_all to rename variables/strings throughout the file\n"
- "- old_string and new_string must be different\n"
- "- Note: skills/ directory is read-only\n"
- ),
+ description=get_edit_file_description(),
)
def _get_declaration(self) -> types.FunctionDeclaration:
@@ -221,63 +169,23 @@ def _get_declaration(self) -> types.FunctionDeclaration:
async def run_async(self, *, args: Dict[str, Any], tool_context: ToolContext) -> str:
"""Edit a file by replacing old_string with new_string."""
- file_path = args.get("file_path", "").strip()
+ file_path_str = args.get("file_path", "").strip()
old_string = args.get("old_string", "")
new_string = args.get("new_string", "")
replace_all = args.get("replace_all", False)
- if not file_path:
+ if not file_path_str:
return "Error: No file path provided"
- if old_string == new_string:
- return "Error: old_string and new_string must be different"
-
- # Resolve path relative to session working directory
- working_dir = get_session_path(session_id=tool_context.session.id)
- path = Path(file_path)
- if not path.is_absolute():
- path = working_dir / path
- path = path.resolve()
-
- if not path.exists():
- return f"Error: File not found: {file_path}"
-
- if not path.is_file():
- return f"Error: Path is not a file: {file_path}"
-
- try:
- content = path.read_text()
- except Exception as e:
- return f"Error reading file {file_path}: {e}"
-
- # Check if old_string exists
- if old_string not in content:
- return (
- f"Error: old_string not found in {file_path}.\n"
- f"Make sure you've read the file first and are using the exact string."
- )
-
- # Count occurrences
- count = content.count(old_string)
-
- if not replace_all and count > 1:
- return (
- f"Error: old_string appears {count} times in {file_path}.\n"
- f"Either provide more context to make it unique, or set "
- f"replace_all=true to replace all occurrences."
- )
-
- # Perform replacement
- if replace_all:
- new_content = content.replace(old_string, new_string)
- else:
- new_content = content.replace(old_string, new_string, 1)
-
try:
- path.write_text(new_content)
- logger.info(f"Successfully replaced {count} occurrence(s) in {file_path}")
- return f"Successfully replaced {count} occurrence(s) in {file_path}"
- except Exception as e:
- error_msg = f"Error writing file {file_path}: {e}"
+ working_dir = get_session_path(session_id=tool_context.session.id)
+ path = Path(file_path_str)
+ if not path.is_absolute():
+ path = working_dir / path
+ path = path.resolve()
+
+ return edit_file_content(path, old_string, new_string, replace_all)
+ except (FileNotFoundError, IsADirectoryError, ValueError, IOError) as e:
+ error_msg = f"Error editing file {file_path_str}: {e}"
logger.error(error_msg)
return error_msg
diff --git a/python/packages/kagent-adk/src/kagent/adk/tools/skill_tool.py b/python/packages/kagent-adk/src/kagent/adk/tools/skill_tool.py
new file mode 100644
index 000000000..bb47d2fac
--- /dev/null
+++ b/python/packages/kagent-adk/src/kagent/adk/tools/skill_tool.py
@@ -0,0 +1,103 @@
+"""Tool for discovering and loading skills."""
+
+from __future__ import annotations
+
+import logging
+from pathlib import Path
+from typing import Any, Dict
+
+from google.adk.tools import BaseTool, ToolContext
+from google.genai import types
+from kagent.skills import (
+ discover_skills,
+ generate_skills_tool_description,
+ load_skill_content,
+)
+
+logger = logging.getLogger("kagent_adk." + __name__)
+
+
+class SkillsTool(BaseTool):
+ """Discover and load skill instructions.
+
+ This tool dynamically discovers available skills and embeds their metadata in the
+ tool description. Agent invokes a skill by name to load its full instructions.
+ """
+
+ def __init__(self, skills_directory: str | Path):
+ self.skills_directory = Path(skills_directory).resolve()
+ if not self.skills_directory.exists():
+ raise ValueError(f"Skills directory does not exist: {self.skills_directory}")
+
+ self._skill_cache: Dict[str, str] = {}
+
+ # Generate description with available skills embedded
+ description = self._generate_description_with_skills()
+
+ super().__init__(
+ name="skills",
+ description=description,
+ )
+
+ def _generate_description_with_skills(self) -> str:
+ """Generate tool description with available skills embedded."""
+ skills = discover_skills(self.skills_directory)
+ return generate_skills_tool_description(skills)
+
+ def _get_declaration(self) -> types.FunctionDeclaration:
+ return types.FunctionDeclaration(
+ name=self.name,
+ description=self.description,
+ parameters=types.Schema(
+ type=types.Type.OBJECT,
+ properties={
+ "command": types.Schema(
+ type=types.Type.STRING,
+ description='The skill name (no arguments). E.g., "data-analysis" or "pdf-processing"',
+ ),
+ },
+ required=["command"],
+ ),
+ )
+
+ async def run_async(self, *, args: Dict[str, Any], tool_context: ToolContext) -> str:
+ """Execute skill loading by name."""
+ skill_name = args.get("command", "").strip()
+
+ if not skill_name:
+ return "Error: No skill name provided"
+
+ return self._invoke_skill(skill_name)
+
+ def _invoke_skill(self, skill_name: str) -> str:
+ """Load and return the full content of a skill."""
+ # Check cache first
+ if skill_name in self._skill_cache:
+ return self._skill_cache[skill_name]
+
+ try:
+ content = load_skill_content(self.skills_directory, skill_name)
+ formatted_content = self._format_skill_content(skill_name, content)
+
+ # Cache the formatted content
+ self._skill_cache[skill_name] = formatted_content
+
+ return formatted_content
+ except (FileNotFoundError, IOError) as e:
+ logger.error(f"Failed to load skill {skill_name}: {e}")
+ return f"Error loading skill '{skill_name}': {e}"
+ except Exception as e:
+ logger.error(f"An unexpected error occurred while loading skill {skill_name}: {e}")
+ return f"An unexpected error occurred while loading skill '{skill_name}': {e}"
+
+ def _format_skill_content(self, skill_name: str, content: str) -> str:
+ """Format skill content for display to the agent."""
+ header = (
+ f'The "{skill_name}" skill is loading\n\n'
+ f"Base directory for this skill: {self.skills_directory}/{skill_name}\n\n"
+ )
+ footer = (
+ "\n\n---\n"
+ "The skill has been loaded. Follow the instructions above and use the bash tool to execute commands."
+ )
+ return header + content + footer
diff --git a/python/packages/kagent-adk/src/kagent/adk/skills/skills_plugin.py b/python/packages/kagent-adk/src/kagent/adk/tools/skills_plugin.py
similarity index 90%
rename from python/packages/kagent-adk/src/kagent/adk/skills/skills_plugin.py
rename to python/packages/kagent-adk/src/kagent/adk/tools/skills_plugin.py
index 78f8d12ac..9f261c8c3 100644
--- a/python/packages/kagent-adk/src/kagent/adk/skills/skills_plugin.py
+++ b/python/packages/kagent-adk/src/kagent/adk/tools/skills_plugin.py
@@ -5,11 +5,7 @@
from typing import Optional
from google.adk.agents import BaseAgent, LlmAgent
-from google.adk.agents.callback_context import CallbackContext
-from google.adk.plugins import BasePlugin
-from google.genai import types
-from ..artifacts import initialize_session_path
from ..tools import BashTool, EditFileTool, ReadFileTool, WriteFileTool
from .skill_tool import SkillsTool
diff --git a/python/packages/kagent-adk/src/kagent/adk/skills/skills_toolset.py b/python/packages/kagent-adk/src/kagent/adk/tools/skills_toolset.py
similarity index 100%
rename from python/packages/kagent-adk/src/kagent/adk/skills/skills_toolset.py
rename to python/packages/kagent-adk/src/kagent/adk/tools/skills_toolset.py
diff --git a/python/packages/kagent-core/src/kagent/core/tracing/_span_processor.py b/python/packages/kagent-core/src/kagent/core/tracing/_span_processor.py
index 673d1949b..d7ab3c2eb 100644
--- a/python/packages/kagent-core/src/kagent/core/tracing/_span_processor.py
+++ b/python/packages/kagent-core/src/kagent/core/tracing/_span_processor.py
@@ -1,8 +1,8 @@
"""Custom span processor to add kagent attributes to all spans in a request context."""
import logging
-from typing import Optional
from contextvars import Token
+from typing import Optional
from opentelemetry import context as otel_context
from opentelemetry.sdk.trace import ReadableSpan, Span, SpanProcessor
diff --git a/python/packages/kagent-core/src/kagent/core/tracing/_utils.py b/python/packages/kagent-core/src/kagent/core/tracing/_utils.py
index 04309e74f..4684da208 100644
--- a/python/packages/kagent-core/src/kagent/core/tracing/_utils.py
+++ b/python/packages/kagent-core/src/kagent/core/tracing/_utils.py
@@ -5,9 +5,7 @@
from opentelemetry import _logs, trace
from opentelemetry.exporter.otlp.proto.grpc._log_exporter import OTLPLogExporter
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
-from opentelemetry.instrumentation.anthropic import AnthropicInstrumentor
from opentelemetry.instrumentation.fastapi import FastAPIInstrumentor
-from opentelemetry.instrumentation.google_generativeai import GoogleGenerativeAiInstrumentor
from opentelemetry.instrumentation.httpx import HTTPXClientInstrumentor
from opentelemetry.instrumentation.openai import OpenAIInstrumentor
from opentelemetry.sdk._events import EventLoggerProvider
@@ -20,6 +18,31 @@
from ._span_processor import KagentAttributesSpanProcessor
+def _instrument_anthropic(event_logger_provider=None):
+ """Instrument Anthropic SDK if available."""
+ try:
+ from opentelemetry.instrumentation.anthropic import AnthropicInstrumentor
+
+ if event_logger_provider:
+ AnthropicInstrumentor(use_legacy_attributes=False).instrument(event_logger_provider=event_logger_provider)
+ else:
+ AnthropicInstrumentor().instrument()
+ except ImportError:
+ # Anthropic SDK is not installed; skipping instrumentation.
+ pass
+
+
+def _instrument_google_generativeai():
+ """Instrument Google GenerativeAI SDK if available."""
+ try:
+ from opentelemetry.instrumentation.google_generativeai import GoogleGenerativeAiInstrumentor
+
+ GoogleGenerativeAiInstrumentor().instrument()
+ except ImportError:
+ # Google GenerativeAI SDK is not installed; skipping instrumentation.
+ pass
+
+
def configure(fastapi_app: FastAPI | None = None):
tracing_enabled = os.getenv("OTEL_TRACING_ENABLED", "false").lower() == "true"
logging_enabled = os.getenv("OTEL_LOGGING_ENABLED", "false").lower() == "true"
@@ -76,10 +99,10 @@ def configure(fastapi_app: FastAPI | None = None):
# Create event logger provider using the configured logger provider
event_logger_provider = EventLoggerProvider(logger_provider)
OpenAIInstrumentor(use_legacy_attributes=False).instrument(event_logger_provider=event_logger_provider)
- AnthropicInstrumentor(use_legacy_attributes=False).instrument(event_logger_provider=event_logger_provider)
+ _instrument_anthropic(event_logger_provider)
else:
# Use legacy attributes (input/output as GenAI span attributes)
logging.info("OpenAI instrumentation configured with legacy GenAI span attributes")
OpenAIInstrumentor().instrument()
- AnthropicInstrumentor().instrument()
- GoogleGenerativeAiInstrumentor().instrument()
+ _instrument_anthropic()
+ _instrument_google_generativeai()
diff --git a/python/packages/kagent-crewai/pyproject.toml b/python/packages/kagent-crewai/pyproject.toml
index ab1af73a7..617db2443 100644
--- a/python/packages/kagent-crewai/pyproject.toml
+++ b/python/packages/kagent-crewai/pyproject.toml
@@ -9,7 +9,7 @@ description = "CrewAI integration for KAgent with A2A server support"
readme = "README.md"
requires-python = ">=3.13"
dependencies = [
- "crewai[tools]>=0.193.2,<1.0.0",
+ "crewai[tools]>= 1.2.0",
"httpx>=0.25.0",
"fastapi>=0.100.0",
"pydantic>=2.0.0",
@@ -21,6 +21,7 @@ dependencies = [
"google-genai>=1.21.1"
]
+
[project.optional-dependencies]
dev = [
"pytest>=7.0.0",
@@ -32,6 +33,7 @@ dev = [
[tool.uv.sources]
kagent-core = {workspace = true}
+
[tool.hatch.build.targets.wheel]
packages = ["src/kagent"]
diff --git a/python/packages/kagent-crewai/src/kagent/crewai/_a2a.py b/python/packages/kagent-crewai/src/kagent/crewai/_a2a.py
index 73adc1fc3..9a02290f1 100644
--- a/python/packages/kagent-crewai/src/kagent/crewai/_a2a.py
+++ b/python/packages/kagent-crewai/src/kagent/crewai/_a2a.py
@@ -9,11 +9,11 @@
from a2a.types import AgentCard
from fastapi import FastAPI, Request
from fastapi.responses import PlainTextResponse
+from kagent.core import KAgentConfig, configure_tracing
+from kagent.core.a2a import KAgentRequestContextBuilder, KAgentTaskStore
from opentelemetry.instrumentation.crewai import CrewAIInstrumentor
from crewai import Crew, Flow
-from kagent.core import KAgentConfig, configure_tracing
-from kagent.core.a2a import KAgentRequestContextBuilder, KAgentTaskStore
from ._executor import CrewAIAgentExecutor, CrewAIAgentExecutorConfig
diff --git a/python/packages/kagent-crewai/src/kagent/crewai/_executor.py b/python/packages/kagent-crewai/src/kagent/crewai/_executor.py
index f74244a60..c5f40fe43 100644
--- a/python/packages/kagent-crewai/src/kagent/crewai/_executor.py
+++ b/python/packages/kagent-crewai/src/kagent/crewai/_executor.py
@@ -19,15 +19,14 @@
TaskStatusUpdateEvent,
TextPart,
)
-from pydantic import BaseModel
-
-from crewai import Crew, Flow
-from crewai.memory import LongTermMemory
-
from kagent.core.tracing._span_processor import (
clear_kagent_span_attributes,
set_kagent_span_attributes,
)
+from pydantic import BaseModel
+
+from crewai import Crew, Flow
+from crewai.memory import LongTermMemory
from ._listeners import A2ACrewAIListener
from ._memory import KagentMemoryStorage
diff --git a/python/packages/kagent-crewai/src/kagent/crewai/_listeners.py b/python/packages/kagent-crewai/src/kagent/crewai/_listeners.py
index ecc20e405..38378901c 100644
--- a/python/packages/kagent-crewai/src/kagent/crewai/_listeners.py
+++ b/python/packages/kagent-crewai/src/kagent/crewai/_listeners.py
@@ -15,6 +15,12 @@
TaskStatusUpdateEvent,
TextPart,
)
+from kagent.core.a2a import (
+ A2A_DATA_PART_METADATA_TYPE_FUNCTION_CALL,
+ A2A_DATA_PART_METADATA_TYPE_FUNCTION_RESPONSE,
+ A2A_DATA_PART_METADATA_TYPE_KEY,
+ get_kagent_metadata_key,
+)
from crewai.events import (
AgentExecutionCompletedEvent,
@@ -27,12 +33,6 @@
ToolUsageFinishedEvent,
ToolUsageStartedEvent,
)
-from kagent.core.a2a import (
- A2A_DATA_PART_METADATA_TYPE_FUNCTION_CALL,
- A2A_DATA_PART_METADATA_TYPE_FUNCTION_RESPONSE,
- A2A_DATA_PART_METADATA_TYPE_KEY,
- get_kagent_metadata_key,
-)
class A2ACrewAIListener(BaseEventListener):
diff --git a/python/packages/kagent-openai/README.md b/python/packages/kagent-openai/README.md
new file mode 100644
index 000000000..04fb66f22
--- /dev/null
+++ b/python/packages/kagent-openai/README.md
@@ -0,0 +1,157 @@
+# KAgent OpenAI Agents SDK Integration
+
+OpenAI Agents SDK integration for KAgent with A2A (Agent-to-Agent) protocol support, session management, and optional skills integration.
+
+---
+
+## Quick Start
+
+```python
+from kagent.openai import KAgentApp
+from agents.agent import Agent
+
+# Create your OpenAI agent
+agent = Agent(
+ name="Assistant",
+ instructions="You are a helpful assistant.",
+ tools=[my_tool], # Optional
+)
+
+# Create KAgent app
+app = KAgentApp(
+ agent=agent,
+ agent_card={
+ "name": "my-openai-agent",
+ "description": "My OpenAI agent",
+ "version": "0.1.0",
+ "capabilities": {"streaming": True},
+ "defaultInputModes": ["text"],
+ "defaultOutputModes": ["text"]
+ },
+ kagent_url="http://localhost:8080",
+ app_name="my-agent"
+)
+
+# Run
+fastapi_app = app.build()
+# uvicorn run_me:fastapi_app
+```
+
+---
+
+## Agent with Skills
+
+Skills provide domain expertise through filesystem-based instruction files and helper tools (read/write/edit files, bash execution). We provide a function to load all skill-related tools. Otherwise, you can select the ones you need by importing from `kagent.openai.tools`.
+
+```python
+from agents.agent import Agent
+from kagent.openai import get_skill_tools
+
+tools = [my_custom_tool]
+tools.extend(get_skill_tools("./skills"))
+
+agent = Agent(
+ name="SkillfulAgent",
+ instructions="Use skills and tools when appropriate.",
+ tools=tools,
+)
+```
+
+See [skills README](../kagent-skills/README.md) for skill format and structure.
+
+---
+
+## Session Management
+
+Sessions persist conversation history in KAgent backend:
+
+```python
+from agents.agent import Agent
+from agents.run import Runner
+from kagent.openai._session_service import KAgentSession
+import httpx
+
+client = httpx.AsyncClient(base_url="http://localhost:8080")
+session = KAgentSession(
+ session_id="conversation_123",
+ client=client,
+ app_name="my-agent",
+)
+
+agent = Agent(name="Assistant", instructions="Be helpful")
+result = await Runner.run(agent, "Hello!", session=session)
+```
+
+---
+
+## Local Development
+
+Test without KAgent backend using in-memory mode:
+
+```python
+app = KAgentApp(
+ agent=agent,
+ agent_card=agent_card,
+ kagent_url="http://localhost:8080",
+ app_name="test-agent"
+)
+
+fastapi_app = app.build_local() # In-memory, no persistence
+```
+
+---
+
+## Deployment
+
+Standard Docker deployment:
+
+```dockerfile
+FROM python:3.13-slim
+WORKDIR /app
+COPY requirements.txt .
+RUN pip install -r requirements.txt
+COPY agent.py .
+CMD ["uvicorn", "agent:fastapi_app", "--host", "0.0.0.0", "--port", "8000"]
+```
+
+Set `KAGENT_URL` environment variable to connect to KAgent backend.
+
+---
+
+## Architecture
+
+| Component | Purpose |
+| ----------------------- | -------------------------------------------- |
+| **KAgentApp** | FastAPI application builder with A2A support |
+| **KAgentSession** | Session persistence via KAgent REST API |
+| **OpenAIAgentExecutor** | Executes agents with event streaming |
+
+---
+
+## Environment Variables
+
+- `KAGENT_URL` - KAgent backend URL (default: http://localhost:8080)
+- `LOG_LEVEL` - Logging level (default: INFO)
+
+---
+
+## Examples
+
+See `samples/openai/` for complete examples:
+
+- `basic_agent/` - Simple agent with custom tools
+- More examples coming soon
+
+---
+
+## See Also
+
+- [OpenAI Agents SDK Docs](https://openai.github.io/openai-agents-python/)
+- [KAgent Skills](../kagent-skills/README.md)
+- [A2A Protocol](https://docs.kagent.ai/a2a)
+
+---
+
+## License
+
+See repository LICENSE file.
diff --git a/python/packages/kagent-openai/pyproject.toml b/python/packages/kagent-openai/pyproject.toml
new file mode 100644
index 000000000..be912e597
--- /dev/null
+++ b/python/packages/kagent-openai/pyproject.toml
@@ -0,0 +1,47 @@
+[project]
+name = "kagent-openai"
+version = "0.1.0"
+description = "OpenAI integration for KAgent with A2A server support"
+readme = "README.md"
+requires-python = ">=3.13"
+dependencies = [
+ "openai>=1.72.0",
+ "openai-agents>=0.4.0",
+ "a2a-sdk>=0.3.1",
+ "kagent-core",
+ "kagent-skills",
+ "httpx>=0.25.0",
+ "fastapi>=0.100.0",
+ "uvicorn>=0.20.0",
+ "pydantic>=2.0.0",
+ "opentelemetry-instrumentation-openai-agents>=0.48.0"
+]
+
+[project.optional-dependencies]
+dev = [
+ "pytest>=7.0.0",
+ "pytest-asyncio>=0.21.0",
+ "black>=23.0.0",
+ "ruff>=0.1.0",
+]
+
+[tool.uv.sources]
+kagent-core = {workspace = true}
+kagent-skills = {workspace = true}
+
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.hatch.build.targets.wheel]
+packages = ["src/kagent"]
+
+[tool.black]
+line-length = 120
+target-version = ['py313']
+
+[tool.ruff]
+line-length = 120
+target-version = "py313"
+lint.select = ["E", "F", "I", "N", "W", "UP"]
+lint.ignore = ["E501", "N803", "N806"]
diff --git a/python/packages/kagent-openai/src/kagent/openai/__init__.py b/python/packages/kagent-openai/src/kagent/openai/__init__.py
new file mode 100644
index 000000000..c07cac33b
--- /dev/null
+++ b/python/packages/kagent-openai/src/kagent/openai/__init__.py
@@ -0,0 +1,11 @@
+"""KAgent OpenAI Integration Package.
+
+This package provides OpenAI integrations for KAgent.
+"""
+
+# Re-export from agent subpackage for convenience
+from ._a2a import KAgentApp
+from .tools import get_skill_tools
+
+__all__ = ["KAgentApp", "get_skill_tools"]
+__version__ = "0.1.0"
diff --git a/python/packages/kagent-openai/src/kagent/openai/_a2a.py b/python/packages/kagent-openai/src/kagent/openai/_a2a.py
new file mode 100644
index 000000000..a8e1ca1f2
--- /dev/null
+++ b/python/packages/kagent-openai/src/kagent/openai/_a2a.py
@@ -0,0 +1,240 @@
+"""KAgent OpenAI Agents SDK Application.
+
+This module provides the main KAgentApp class for building FastAPI applications
+that integrate OpenAI Agents SDK with the A2A (Agent-to-Agent) protocol.
+"""
+
+from __future__ import annotations
+
+import faulthandler
+import logging
+import os
+from collections.abc import Callable
+
+import httpx
+from a2a.server.apps import A2AFastAPIApplication
+from a2a.server.request_handlers import DefaultRequestHandler
+from a2a.server.tasks import InMemoryTaskStore
+from a2a.types import AgentCard
+from agents import Agent, set_tracing_disabled
+from fastapi import FastAPI, Request
+from fastapi.responses import PlainTextResponse
+from opentelemetry.instrumentation.openai_agents import OpenAIAgentsInstrumentor
+
+from kagent.core import KAgentConfig, configure_tracing
+from kagent.core.a2a import KAgentRequestContextBuilder, KAgentTaskStore
+
+from ._agent_executor import OpenAIAgentExecutor, OpenAIAgentExecutorConfig
+from ._session_service import KAgentSessionFactory
+
+# Configure logging
+logger = logging.getLogger(__name__)
+
+
+def configure_logging() -> None:
+ """Configure logging based on LOG_LEVEL environment variable."""
+ log_level = os.getenv("LOG_LEVEL", "INFO").upper()
+ numeric_level = getattr(logging, log_level, logging.INFO)
+ logging.basicConfig(level=numeric_level)
+ logging.info(f"Logging configured with level: {log_level}")
+
+
+configure_logging()
+
+
+def health_check(request: Request) -> PlainTextResponse:
+ """Health check endpoint."""
+ return PlainTextResponse("OK")
+
+
+def thread_dump(request: Request) -> PlainTextResponse:
+ """Thread dump endpoint for debugging."""
+ import io
+
+ buf = io.StringIO()
+ faulthandler.dump_traceback(file=buf)
+ buf.seek(0)
+ return PlainTextResponse(buf.read())
+
+
+# Environment variables
+kagent_url_override = os.getenv("KAGENT_URL")
+sts_well_known_uri = os.getenv("STS_WELL_KNOWN_URI")
+
+
+class KAgentApp:
+ """FastAPI application builder for OpenAI Agents SDK with KAgent integration."""
+
+ def __init__(
+ self,
+ agent: Agent | Callable[[], Agent],
+ agent_card: AgentCard,
+ config: KAgentConfig,
+ executor_config: OpenAIAgentExecutorConfig | None = None,
+ tracing: bool = True,
+ ):
+ """Initialize the KAgent application.
+
+ Args:
+ agent: OpenAI Agent instance or factory function
+ agent_card: A2A agent card describing the agent's capabilities
+            config: KAgent configuration (backend URL, app name, etc.)
+            executor_config: Optional executor configuration
+            tracing: Whether to configure OpenTelemetry tracing (default: True)
+ """
+ self.agent = agent
+ self.agent_card = AgentCard.model_validate(agent_card)
+ self.config = config
+ self.executor_config = executor_config or OpenAIAgentExecutorConfig()
+ self.tracing = tracing
+
+ def build(self) -> FastAPI:
+ """Build a production FastAPI application with KAgent integration.
+
+ This creates an application that:
+ - Uses KAgentSessionFactory for session management
+ - Connects to KAgent backend via REST API
+ - Implements A2A protocol handlers
+ - Includes health check endpoints
+
+ Returns:
+ Configured FastAPI application
+ """
+ # Create HTTP client with KAgent backend
+ http_client = httpx.AsyncClient(
+ base_url=kagent_url_override or self.config.kagent_url,
+ )
+
+ # Create session factory
+ session_factory = KAgentSessionFactory(
+ client=http_client,
+ app_name=self.config.app_name,
+ )
+
+ # Create agent executor with session factory
+ agent_executor = OpenAIAgentExecutor(
+ agent=self.agent,
+ app_name=self.config.app_name,
+ session_factory=session_factory.create_session,
+ config=self.executor_config,
+ )
+
+ # Create KAgent task store
+ kagent_task_store = KAgentTaskStore(http_client)
+
+ # Create request context builder and handler
+ request_context_builder = KAgentRequestContextBuilder(task_store=kagent_task_store)
+ request_handler = DefaultRequestHandler(
+ agent_executor=agent_executor,
+ task_store=kagent_task_store,
+ request_context_builder=request_context_builder,
+ )
+
+ # Create A2A FastAPI application
+ a2a_app = A2AFastAPIApplication(
+ agent_card=self.agent_card,
+ http_handler=request_handler,
+ )
+
+ # Enable fault handler
+ faulthandler.enable()
+
+ # Create FastAPI app with lifespan
+ app = FastAPI()
+
+ if self.tracing:
+ try:
+ # Set OpenAI tracing disabled and set custom OTEL tracing to be enabled
+ logger.info("Configuring tracing for KAgent OpenAI app")
+ set_tracing_disabled(True)
+ configure_tracing(app)
+
+ # Configure tracing for OpenAI Agents SDK
+ tracing_enabled = os.getenv("OTEL_TRACING_ENABLED", "false").lower() == "true"
+ if tracing_enabled:
+ logger.info("Enabling OpenAI Agents SDK tracing")
+ OpenAIAgentsInstrumentor().instrument()
+
+ logger.info("Tracing configured for KAgent OpenAI app")
+ except Exception as e:
+ logger.error(f"Failed to configure tracing: {e}")
+
+ # Add health check endpoints
+ app.add_route("/health", methods=["GET"], route=health_check)
+ app.add_route("/thread_dump", methods=["GET"], route=thread_dump)
+
+ # Add A2A routes
+ a2a_app.add_routes_to_app(app)
+
+ return app
+
+ def build_local(self) -> FastAPI:
+ """Build a local FastAPI application for testing without KAgent backend.
+
+ This creates an application that:
+ - Uses InMemoryTaskStore (no KAgent backend needed)
+ - Runs agents without session persistence
+ - Useful for local development and testing
+
+ Returns:
+ Configured FastAPI application for local use
+ """
+ # Create agent executor without session factory (no persistence)
+ agent_executor = OpenAIAgentExecutor(
+ agent=self.agent,
+ app_name=self.config.app_name,
+ session_factory=None, # No session persistence in local mode
+ config=self.executor_config,
+ )
+ # Use in-memory task store
+ task_store = InMemoryTaskStore()
+
+ # Create request context builder and handler
+ request_context_builder = KAgentRequestContextBuilder(task_store=task_store)
+ request_handler = DefaultRequestHandler(
+ agent_executor=agent_executor,
+ task_store=task_store,
+ request_context_builder=request_context_builder,
+ )
+
+ # Create A2A FastAPI application
+ a2a_app = A2AFastAPIApplication(
+ agent_card=self.agent_card,
+ http_handler=request_handler,
+ )
+
+ # Enable fault handler
+ faulthandler.enable()
+
+ # Create FastAPI app
+ app = FastAPI()
+
+ # Add health check endpoints
+ app.add_route("/health", methods=["GET"], route=health_check)
+ app.add_route("/thread_dump", methods=["GET"], route=thread_dump)
+
+ # Add A2A routes
+ a2a_app.add_routes_to_app(app)
+
+ return app
+
+ async def test(self, task: str) -> None:
+ """Test the agent with a simple task.
+
+ Args:
+ task: The task/question to ask the agent
+ """
+ from agents.run import Runner
+
+ # Resolve agent
+ if callable(self.agent):
+ agent = self.agent()
+ else:
+ agent = self.agent
+
+ logger.info(f"\n>>> User Query: {task}")
+
+ # Run the agent
+ result = await Runner.run(agent, task)
+
+ logger.info(f">>> Agent Response: {result.final_output}")
diff --git a/python/packages/kagent-openai/src/kagent/openai/_agent_executor.py b/python/packages/kagent-openai/src/kagent/openai/_agent_executor.py
new file mode 100644
index 000000000..cd4d49511
--- /dev/null
+++ b/python/packages/kagent-openai/src/kagent/openai/_agent_executor.py
@@ -0,0 +1,331 @@
+"""OpenAI Agent Executor for A2A Protocol.
+
+This module implements an agent executor that runs OpenAI Agents SDK agents
+within the A2A (Agent-to-Agent) protocol, converting streaming events to A2A events.
+"""
+
+from __future__ import annotations
+
+import asyncio
+import logging
+import uuid
+from collections.abc import Callable
+from dataclasses import dataclass
+from datetime import UTC, datetime
+from typing import override
+
+from a2a.server.agent_execution import AgentExecutor
+from a2a.server.agent_execution.context import RequestContext
+from a2a.server.events.event_queue import EventQueue
+from a2a.types import (
+ Artifact,
+ Message,
+ Part,
+ Role,
+ TaskArtifactUpdateEvent,
+ TaskState,
+ TaskStatus,
+ TaskStatusUpdateEvent,
+ TextPart,
+)
+from agents.agent import Agent
+from agents.run import Runner
+from pydantic import BaseModel
+
+from kagent.core.a2a import TaskResultAggregator, get_kagent_metadata_key
+
+from ._event_converter import convert_openai_event_to_a2a_events
+from ._session_service import KAgentSession
+
+logger = logging.getLogger(__name__)
+
+
+class OpenAIAgentExecutorConfig(BaseModel):
+ """Configuration for the OpenAIAgentExecutor."""
+
+ # Maximum time to wait for agent execution (seconds)
+ execution_timeout: float = 300.0
+
+
+@dataclass
+class SessionContext:
+ """Context information for a KAgent session."""
+
+ session_id: str
+
+
+class OpenAIAgentExecutor(AgentExecutor):
+ """An AgentExecutor that runs OpenAI Agents SDK agents against A2A requests.
+
+ This executor integrates OpenAI Agents SDK with the A2A protocol, handling
+ session management, event streaming, and result aggregation.
+ """
+
+ def __init__(
+ self,
+ *,
+ agent: Agent | Callable[[], Agent],
+ app_name: str,
+ session_factory: Callable[[str, str], KAgentSession] | None = None,
+ config: OpenAIAgentExecutorConfig | None = None,
+ ):
+ """Initialize the executor.
+
+ Args:
+ agent: OpenAI Agent instance or factory function that returns an agent
+ app_name: Application name for session management
+ session_factory: Optional factory for creating KAgentSession instances
+ config: Optional executor configuration
+ """
+ super().__init__()
+ self._agent = agent
+ self.app_name = app_name
+ self._session_factory = session_factory
+ self._config = config or OpenAIAgentExecutorConfig()
+
+ def _resolve_agent(self) -> Agent:
+ """Resolve the agent, handling both instances and factory functions."""
+ if callable(self._agent):
+ # Call the factory to get the agent
+ return self._agent()
+ return self._agent
+
+ async def _stream_agent_events(
+ self,
+ agent: Agent,
+ user_input: str,
+ session: KAgentSession | None,
+ context: RequestContext,
+ event_queue: EventQueue,
+ ) -> None:
+ """Stream agent execution events and convert them to A2A events."""
+ task_result_aggregator = TaskResultAggregator()
+        session_context = SessionContext(session_id=session.session_id if session else context.context_id)
+
+ try:
+ # Use run_streamed for streaming support
+ result = Runner.run_streamed(
+ agent,
+ user_input,
+ session=session,
+ context=session_context,
+ )
+
+ # Process streaming events
+ async for event in result.stream_events():
+ # Convert OpenAI event to A2A events
+ a2a_events = convert_openai_event_to_a2a_events(
+ event,
+ context.task_id,
+ context.context_id,
+ self.app_name,
+ )
+
+ for a2a_event in a2a_events:
+ task_result_aggregator.process_event(a2a_event)
+ await event_queue.enqueue_event(a2a_event)
+
+ # Handle final output
+ if hasattr(result, "final_output") and result.final_output:
+ final_message = Message(
+ message_id=str(uuid.uuid4()),
+ role=Role.agent,
+ parts=[Part(TextPart(text=str(result.final_output)))],
+ )
+
+ # Publish final artifact
+ await event_queue.enqueue_event(
+ TaskArtifactUpdateEvent(
+ task_id=context.task_id,
+ last_chunk=True,
+ context_id=context.context_id,
+ artifact=Artifact(
+ artifact_id=str(uuid.uuid4()),
+ parts=final_message.parts,
+ ),
+ )
+ )
+
+ # Publish completion status
+ await event_queue.enqueue_event(
+ TaskStatusUpdateEvent(
+ task_id=context.task_id,
+ status=TaskStatus(
+ state=TaskState.completed,
+ timestamp=datetime.now(UTC).isoformat(),
+ ),
+ context_id=context.context_id,
+ final=True,
+ )
+ )
+ else:
+ # No output - publish based on aggregator state
+ if (
+ task_result_aggregator.task_state == TaskState.working
+ and task_result_aggregator.task_status_message is not None
+ and task_result_aggregator.task_status_message.parts
+ ):
+ await event_queue.enqueue_event(
+ TaskArtifactUpdateEvent(
+ task_id=context.task_id,
+ last_chunk=True,
+ context_id=context.context_id,
+ artifact=Artifact(
+ artifact_id=str(uuid.uuid4()),
+ parts=task_result_aggregator.task_status_message.parts,
+ ),
+ )
+ )
+ await event_queue.enqueue_event(
+ TaskStatusUpdateEvent(
+ task_id=context.task_id,
+ status=TaskStatus(
+ state=TaskState.completed,
+ timestamp=datetime.now(UTC).isoformat(),
+ ),
+ context_id=context.context_id,
+ final=True,
+ )
+ )
+ else:
+ await event_queue.enqueue_event(
+ TaskStatusUpdateEvent(
+ task_id=context.task_id,
+ status=TaskStatus(
+ state=task_result_aggregator.task_state,
+ timestamp=datetime.now(UTC).isoformat(),
+ message=task_result_aggregator.task_status_message,
+ ),
+ context_id=context.context_id,
+ final=True,
+ )
+ )
+
+ except Exception as e:
+ logger.error(f"Error during agent execution: {e}", exc_info=True)
+ raise
+
+ @override
+ async def cancel(self, context: RequestContext, event_queue: EventQueue):
+ """Cancel the execution."""
+ # TODO: Implement proper cancellation logic if needed
+ raise NotImplementedError("Cancellation is not implemented")
+
+ @override
+ async def execute(
+ self,
+ context: RequestContext,
+ event_queue: EventQueue,
+ ):
+ """Execute the OpenAI agent and publish updates to the event queue."""
+ if not context.message:
+ raise ValueError("A2A request must have a message")
+
+ # Send task submitted event for new tasks
+ if not context.current_task:
+ await event_queue.enqueue_event(
+ TaskStatusUpdateEvent(
+ task_id=context.task_id,
+ status=TaskStatus(
+ state=TaskState.submitted,
+ message=context.message,
+ timestamp=datetime.now(UTC).isoformat(),
+ ),
+ context_id=context.context_id,
+ final=False,
+ )
+ )
+
+ # Extract session ID from context
+ session_id = getattr(context, "session_id", None) or context.context_id
+ user_id = getattr(context, "user_id", "admin@kagent.dev")
+
+ # Send working status
+ await event_queue.enqueue_event(
+ TaskStatusUpdateEvent(
+ task_id=context.task_id,
+ status=TaskStatus(
+ state=TaskState.working,
+ timestamp=datetime.now(UTC).isoformat(),
+ ),
+ context_id=context.context_id,
+ final=False,
+ metadata={
+ get_kagent_metadata_key("app_name"): self.app_name,
+ get_kagent_metadata_key("session_id"): session_id,
+ get_kagent_metadata_key("user_id"): user_id,
+ },
+ )
+ )
+
+ try:
+ # Resolve the agent
+ agent = self._resolve_agent()
+
+ # Get user input from A2A message
+ user_input = context.get_user_input()
+
+ # Create session if factory is provided
+ session = None
+ if self._session_factory:
+ session = self._session_factory(session_id, user_id)
+
+ # Stream agent execution
+ await asyncio.wait_for(
+ self._stream_agent_events(
+ agent,
+ user_input,
+ session,
+ context,
+ event_queue,
+ ),
+ timeout=self._config.execution_timeout,
+ )
+
+ except TimeoutError:
+ logger.error(f"Agent execution timed out after {self._config.execution_timeout} seconds")
+ await event_queue.enqueue_event(
+ TaskStatusUpdateEvent(
+ task_id=context.task_id,
+ status=TaskStatus(
+ state=TaskState.failed,
+ timestamp=datetime.now(UTC).isoformat(),
+ message=Message(
+ message_id=str(uuid.uuid4()),
+ role=Role.agent,
+ parts=[Part(TextPart(text="Execution timed out"))],
+ ),
+ ),
+ context_id=context.context_id,
+ final=True,
+ )
+ )
+ except Exception as e:
+ logger.error(f"Error during OpenAI agent execution: {e}", exc_info=True)
+
+ error_message = str(e)
+
+ await event_queue.enqueue_event(
+ TaskStatusUpdateEvent(
+ task_id=context.task_id,
+ status=TaskStatus(
+ state=TaskState.failed,
+ timestamp=datetime.now(UTC).isoformat(),
+ message=Message(
+ message_id=str(uuid.uuid4()),
+ role=Role.agent,
+ parts=[Part(TextPart(text=f"Execution failed: {error_message}"))],
+ metadata={
+ get_kagent_metadata_key("error_type"): type(e).__name__,
+ get_kagent_metadata_key("error_detail"): error_message,
+ },
+ ),
+ ),
+ context_id=context.context_id,
+ final=True,
+ metadata={
+ get_kagent_metadata_key("error_type"): type(e).__name__,
+ get_kagent_metadata_key("error_detail"): error_message,
+ },
+ )
+ )
diff --git a/python/packages/kagent-openai/src/kagent/openai/_event_converter.py b/python/packages/kagent-openai/src/kagent/openai/_event_converter.py
new file mode 100644
index 000000000..14fbe5483
--- /dev/null
+++ b/python/packages/kagent-openai/src/kagent/openai/_event_converter.py
@@ -0,0 +1,372 @@
+"""Event converter for OpenAI Agents SDK to A2A protocol.
+
+This module converts OpenAI Agents SDK streaming events to A2A protocol events.
+"""
+
+from __future__ import annotations
+
+import json
+import logging
+import uuid
+from datetime import UTC, datetime
+
+from a2a.server.events import Event as A2AEvent
+from a2a.types import (
+ DataPart,
+ Message,
+ Role,
+ TaskState,
+ TaskStatus,
+ TaskStatusUpdateEvent,
+ TextPart,
+)
+from a2a.types import Part as A2APart
+from agents.items import MessageOutputItem, ToolCallItem, ToolCallOutputItem
+from agents.stream_events import (
+ AgentUpdatedStreamEvent,
+ RawResponsesStreamEvent,
+ RunItemStreamEvent,
+ StreamEvent,
+)
+
+from kagent.core.a2a import (
+ A2A_DATA_PART_METADATA_TYPE_FUNCTION_CALL,
+ A2A_DATA_PART_METADATA_TYPE_FUNCTION_RESPONSE,
+ A2A_DATA_PART_METADATA_TYPE_KEY,
+ get_kagent_metadata_key,
+)
+
+logger = logging.getLogger(__name__)
+
+
+def convert_openai_event_to_a2a_events(
+ event: StreamEvent,
+ task_id: str,
+ context_id: str,
+ app_name: str,
+) -> list[A2AEvent]:
+ """Convert an OpenAI Agents SDK event to A2A events.
+
+ Args:
+ event: OpenAI SDK streaming event
+ task_id: A2A task ID
+ context_id: A2A context ID
+ app_name: Application name for metadata
+
+ Returns:
+ List of A2A events (may be empty if event doesn't need conversion)
+ """
+ a2a_events: list[A2AEvent] = []
+
+ try:
+ # Handle RunItemStreamEvent (messages, tool calls, tool outputs)
+ if isinstance(event, RunItemStreamEvent):
+ a2a_events.extend(_convert_run_item_event(event, task_id, context_id, app_name))
+
+ # Handle RawResponsesStreamEvent (raw LLM responses)
+ elif isinstance(event, RawResponsesStreamEvent):
+ # These are low-level events - can be logged but not converted
+ logger.debug(f"Raw response event: {event.data}")
+
+ # Handle AgentUpdatedStreamEvent (agent handoffs)
+ elif isinstance(event, AgentUpdatedStreamEvent):
+ a2a_events.extend(_convert_agent_updated_event(event, task_id, context_id, app_name))
+
+ # Other event types
+ else:
+ logger.debug(f"Unhandled event type: {type(event).__name__}")
+
+ except Exception as e:
+ logger.error(f"Error converting OpenAI event to A2A: {e}", exc_info=True)
+ # Don't raise - we want to continue processing other events
+
+ return a2a_events
+
+
+def _convert_run_item_event(
+ event: RunItemStreamEvent,
+ task_id: str,
+ context_id: str,
+ app_name: str,
+) -> list[A2AEvent]:
+ """Convert a RunItemStreamEvent to A2A events.
+
+ Args:
+ event: OpenAI run item stream event
+ task_id: A2A task ID
+ context_id: A2A context ID
+ app_name: Application name
+
+ Returns:
+ List containing A2A events based on the item type
+ """
+ # Handle message output
+ if isinstance(event.item, MessageOutputItem):
+ return _convert_message_output(event.item, task_id, context_id, app_name)
+
+ # Handle tool calls
+ elif isinstance(event.item, ToolCallItem):
+ return _convert_tool_call(event.item, task_id, context_id, app_name)
+
+ # Handle tool outputs
+ elif isinstance(event.item, ToolCallOutputItem):
+ return _convert_tool_output(event.item, task_id, context_id, app_name)
+
+ # Other item types
+ else:
+ logger.debug(f"Unhandled run item type: {type(event.item).__name__}")
+ return []
+
+
+def _convert_message_output(
+ item: MessageOutputItem,
+ task_id: str,
+ context_id: str,
+ app_name: str,
+) -> list[A2AEvent]:
+ """Convert a message output item to A2A event.
+
+ MessageOutputItem.raw_item is a ResponseOutputMessage with content list.
+ Each content item is either ResponseOutputText or ResponseOutputRefusal.
+ """
+ text_parts = []
+
+ # Access the raw Pydantic model
+ raw_message = item.raw_item
+
+ # Iterate through content parts
+ if hasattr(raw_message, "content") and raw_message.content:
+ for part in raw_message.content:
+ # Check if this is a text part (ResponseOutputText has 'text' field)
+ if hasattr(part, "text"):
+ text_parts.append(part.text)
+ # Otherwise, it is ResponseOutputRefusal and the model will explain why
+ elif hasattr(part, "refusal"):
+ text_parts.append(f"[Refusal] {part.refusal}")
+
+ if not text_parts:
+ return []
+
+ text_content = "".join(text_parts)
+
+ message = Message(
+ message_id=str(uuid.uuid4()),
+ role=Role.agent,
+ parts=[A2APart(TextPart(text=text_content))],
+ metadata={
+ get_kagent_metadata_key("app_name"): app_name,
+ get_kagent_metadata_key("event_type"): "message_output",
+ },
+ )
+
+ status_event = TaskStatusUpdateEvent(
+ task_id=task_id,
+ context_id=context_id,
+ status=TaskStatus(
+ state=TaskState.working,
+ message=message,
+ timestamp=datetime.now(UTC).isoformat(),
+ ),
+ metadata={
+ get_kagent_metadata_key("app_name"): app_name,
+ },
+ final=False,
+ )
+
+ return [status_event]
+
+
+def _convert_tool_call(
+ item: ToolCallItem,
+ task_id: str,
+ context_id: str,
+ app_name: str,
+) -> list[A2AEvent]:
+ """Convert a tool call item to A2A event.
+
+ ToolCallItem.raw_item is typically ResponseFunctionToolCall with fields at top level:
+ - name: str (tool name)
+ - call_id: str (unique ID for this call)
+ - arguments: str (JSON string)
+ - id: Optional[str] (alternate ID field)
+ """
+ raw_call = item.raw_item
+
+ # Extract tool call details from the raw item (fields are at top level)
+ tool_name = raw_call.name if hasattr(raw_call, "name") else "unknown"
+ call_id = (
+ raw_call.call_id
+ if hasattr(raw_call, "call_id")
+ else (raw_call.id if hasattr(raw_call, "id") else str(uuid.uuid4()))
+ )
+ tool_arguments = {}
+
+ # Arguments are a JSON string, need to parse them
+ if hasattr(raw_call, "arguments"):
+ try:
+ tool_arguments = (
+ json.loads(raw_call.arguments) if isinstance(raw_call.arguments, str) else raw_call.arguments
+ )
+ except (json.JSONDecodeError, TypeError):
+ logger.warning(f"Failed to parse arguments: {raw_call.arguments}")
+ tool_arguments = {"raw": str(raw_call.arguments)}
+
+ # Create a DataPart for the function call
+ # Note: Frontend expects 'args' not 'arguments', and 'id' for the call ID
+ function_data = {
+ "id": call_id,
+ "name": tool_name,
+ "args": tool_arguments,
+ }
+
+ data_part = DataPart(
+ data=function_data,
+ metadata={
+ get_kagent_metadata_key(A2A_DATA_PART_METADATA_TYPE_KEY): A2A_DATA_PART_METADATA_TYPE_FUNCTION_CALL,
+ },
+ )
+
+ message = Message(
+ message_id=str(uuid.uuid4()),
+ role=Role.agent,
+ parts=[A2APart(data_part)],
+ metadata={
+ get_kagent_metadata_key("app_name"): app_name,
+ get_kagent_metadata_key("event_type"): "tool_call",
+ },
+ )
+
+ status_event = TaskStatusUpdateEvent(
+ task_id=task_id,
+ context_id=context_id,
+ status=TaskStatus(
+ state=TaskState.working,
+ message=message,
+ timestamp=datetime.now(UTC).isoformat(),
+ ),
+ metadata={
+ get_kagent_metadata_key("app_name"): app_name,
+ },
+ final=False,
+ )
+
+ return [status_event]
+
+
+def _convert_tool_output(
+ item: ToolCallOutputItem,
+ task_id: str,
+ context_id: str,
+ app_name: str,
+) -> list[A2AEvent]:
+ """Convert a tool output item to A2A event.
+
+ ToolCallOutputItem contains:
+ - raw_item: FunctionCallOutput | ComputerCallOutput | LocalShellCallOutput
+ - output: The actual Python object returned by the tool
+ """
+ raw_output = item.raw_item
+
+ # Extract tool output details from the raw item
+ call_id = raw_output.call_id if hasattr(raw_output, "call_id") else str(uuid.uuid4())
+
+ # item.output is the actual return value (Any)
+ actual_output: str = item.output
+
+ # Create a DataPart for the function response
+ function_data = {
+ "id": call_id,
+ "name": call_id, # Name is not returned by the tool
+ "response": {"result": actual_output},
+ }
+
+ data_part = DataPart(
+ data=function_data,
+ metadata={
+ get_kagent_metadata_key(A2A_DATA_PART_METADATA_TYPE_KEY): A2A_DATA_PART_METADATA_TYPE_FUNCTION_RESPONSE,
+ },
+ )
+
+ message = Message(
+ message_id=str(uuid.uuid4()),
+ role=Role.agent,
+ parts=[A2APart(data_part)],
+ metadata={
+ get_kagent_metadata_key("app_name"): app_name,
+ get_kagent_metadata_key("event_type"): "tool_output",
+ },
+ )
+
+ status_event = TaskStatusUpdateEvent(
+ task_id=task_id,
+ context_id=context_id,
+ status=TaskStatus(
+ state=TaskState.working,
+ message=message,
+ timestamp=datetime.now(UTC).isoformat(),
+ ),
+ metadata={
+ get_kagent_metadata_key("app_name"): app_name,
+ },
+ final=False,
+ )
+
+ return [status_event]
+
+
+def _convert_agent_updated_event(
+ event: AgentUpdatedStreamEvent,
+ task_id: str,
+ context_id: str,
+ app_name: str,
+) -> list[A2AEvent]:
+ """Convert an agent updated event (handoff) to A2A event.
+
+ This is converted to a function_call event so the frontend renders it
+ using the AgentCallDisplay component. This is ideal if there are multiple handoffs.
+ """
+ agent_name = event.new_agent.name
+ if "/" in agent_name:
+ tool_name = agent_name.replace("/", "__NS__")
+ else:
+ tool_name = f"{agent_name}__NS__agent"
+
+ function_data = {
+ "id": str(uuid.uuid4()),
+ "name": tool_name,
+ "args": {"target_agent": agent_name},
+ }
+
+ data_part = DataPart(
+ data=function_data,
+ metadata={
+ get_kagent_metadata_key(A2A_DATA_PART_METADATA_TYPE_KEY): A2A_DATA_PART_METADATA_TYPE_FUNCTION_CALL,
+ },
+ )
+
+ message = Message(
+ message_id=str(uuid.uuid4()),
+ role=Role.agent,
+ parts=[A2APart(data_part)],
+ metadata={
+ get_kagent_metadata_key("app_name"): app_name,
+ get_kagent_metadata_key("event_type"): "agent_handoff",
+ get_kagent_metadata_key("new_agent_name"): agent_name,
+ },
+ )
+
+ status_event = TaskStatusUpdateEvent(
+ task_id=task_id,
+ context_id=context_id,
+ status=TaskStatus(
+ state=TaskState.working,
+ message=message,
+ timestamp=datetime.now(UTC).isoformat(),
+ ),
+ metadata={
+ get_kagent_metadata_key("app_name"): app_name,
+ },
+ final=False,
+ )
+
+ return [status_event]
diff --git a/python/packages/kagent-openai/src/kagent/openai/_session_service.py b/python/packages/kagent-openai/src/kagent/openai/_session_service.py
new file mode 100644
index 000000000..19ea158e5
--- /dev/null
+++ b/python/packages/kagent-openai/src/kagent/openai/_session_service.py
@@ -0,0 +1,285 @@
+"""KAgent Session Service for OpenAI Agents SDK.
+
+This module implements the OpenAI Agents SDK SessionABC protocol,
+storing session data in the KAgent backend via REST API.
+"""
+
+from __future__ import annotations
+
+import logging
+
+import httpx
+from agents.items import TResponseInputItem
+from agents.memory.session import SessionABC
+
+logger = logging.getLogger(__name__)
+
+
+class KAgentSession(SessionABC):
+ """A session implementation that uses the KAgent API.
+
+ This session integrates with the KAgent server to manage session state
+ and persistence through HTTP API calls, implementing the OpenAI Agents SDK
+ SessionABC protocol.
+ """
+
+ def __init__(
+ self,
+ session_id: str,
+ client: httpx.AsyncClient,
+ app_name: str,
+ user_id: str,
+ ):
+ """Initialize a KAgent session.
+
+ Args:
+ session_id: Unique identifier for this session
+ client: HTTP client for making API calls
+ app_name: Application name for session tracking
+ user_id: User identifier for session scoping
+ """
+ self.session_id = session_id
+ self.client = client
+ self.app_name = app_name
+ self.user_id = user_id
+ self._items_cache: list[TResponseInputItem] | None = None
+
+ async def _ensure_session_exists(self) -> None:
+ """Ensure the session exists in KAgent backend, creating if needed."""
+ try:
+ # Try to get the session
+ response = await self.client.get(
+ f"/api/sessions/{self.session_id}?user_id={self.user_id}&limit=0",
+ headers={"X-User-ID": self.user_id, "X-Agent-Name": self.app_name},
+ )
+ if response.status_code == 404:
+ # Session doesn't exist, create it
+ await self._create_session()
+ else:
+ response.raise_for_status()
+ except httpx.HTTPStatusError as e:
+ if e.response.status_code == 404:
+ await self._create_session()
+ else:
+ raise
+
+ async def _create_session(self) -> None:
+ """Create a new session in KAgent backend."""
+ request_data = {
+ "id": self.session_id,
+ "user_id": self.user_id,
+ "agent_ref": self.app_name,
+ }
+
+ response = await self.client.post(
+ "/api/sessions",
+ json=request_data,
+ headers={"X-User-ID": self.user_id, "X-Agent-Name": self.app_name},
+ )
+ response.raise_for_status()
+
+ data = response.json()
+ if not data.get("data"):
+ raise RuntimeError(f"Failed to create session: {data.get('message', 'Unknown error')}")
+
+ logger.debug(f"Created session {self.session_id} for user {self.user_id}")
+
+ async def get_items(self, limit: int | None = None) -> list[TResponseInputItem]:
+ """Retrieve conversation history for this session.
+
+ Args:
+ limit: Maximum number of items to retrieve (None for all)
+
+ Returns:
+ List of conversation items from the session
+ """
+ try:
+ # Build URL with limit parameter
+ url = f"/api/sessions/{self.session_id}?user_id={self.user_id}"
+ if limit is not None:
+ url += f"&limit={limit}"
+ else:
+ url += "&limit=-1" # -1 means all items
+
+ response = await self.client.get(
+ url,
+ headers={"X-User-ID": self.user_id, "X-Agent-Name": self.app_name},
+ )
+
+ if response.status_code == 404:
+ # Session doesn't exist yet, return empty list
+ return []
+
+ response.raise_for_status()
+ data = response.json()
+
+ if not data.get("data") or not data["data"].get("events"):
+ return []
+
+ # Convert stored events back to OpenAI items format
+ items: list[TResponseInputItem] = []
+ events_data = data["data"]["events"]
+
+ for event_data in events_data:
+ # Events are stored as JSON strings in the 'data' field
+ event_json = event_data.get("data")
+ if event_json:
+ # Parse the event and extract items if they exist
+ import json
+
+ try:
+ event_obj = json.loads(event_json)
+ # Look for items in the event
+ if "items" in event_obj:
+ items.extend(event_obj["items"])
+ except (json.JSONDecodeError, TypeError) as e:
+ logger.warning(f"Failed to parse event data: {e}")
+ continue
+
+ # Apply limit if specified
+ if limit is not None and limit > 0:
+ items = items[-limit:]
+
+ self._items_cache = items
+ return items
+
+ except httpx.HTTPStatusError as e:
+ if e.response.status_code == 404:
+ return []
+ raise
+
+ async def add_items(self, items: list[TResponseInputItem]) -> None:
+ """Store new items for this session.
+
+ Args:
+ items: List of conversation items to add to the session
+ """
+ if not items:
+ return
+
+ # Ensure session exists before adding items
+ await self._ensure_session_exists()
+
+ # Store items as an event in the session
+ import json
+ import uuid
+ from datetime import UTC, datetime
+
+ event_data = {
+ "id": str(uuid.uuid4()),
+ "data": json.dumps(
+ {
+ "timestamp": datetime.now(UTC).isoformat(),
+ "items": items,
+ "type": "conversation_items",
+ }
+ ),
+ }
+
+ response = await self.client.post(
+ f"/api/sessions/{self.session_id}/events?user_id={self.user_id}",
+ json=event_data,
+ headers={"X-User-ID": self.user_id, "X-Agent-Name": self.app_name},
+ )
+ response.raise_for_status()
+
+ # Update cache
+ if self._items_cache is not None:
+ self._items_cache.extend(items)
+
+ logger.debug(f"Added {len(items)} items to session {self.session_id}")
+
+ async def pop_item(self) -> TResponseInputItem | None:
+ """Remove and return the most recent item from this session.
+
+ Returns:
+ The most recent item, or None if session is empty
+ """
+ # Get all items
+ items = await self.get_items()
+
+ if not items:
+ return None
+
+ # Pop the last item
+ last_item = items.pop()
+
+ # Clear the session and re-add remaining items
+ # This is inefficient but matches the expected behavior
+ # A production implementation might use a more efficient approach
+ await self.clear_session()
+ if items:
+ await self.add_items(items)
+
+ # Update cache
+ self._items_cache = items
+
+ return last_item
+
+ async def clear_session(self) -> None:
+ """Clear all items for this session."""
+ try:
+ # Delete the session from KAgent backend
+ response = await self.client.delete(
+ f"/api/sessions/{self.session_id}?user_id={self.user_id}",
+ headers={"X-User-ID": self.user_id, "X-Agent-Name": self.app_name},
+ )
+ response.raise_for_status()
+
+ # Clear cache
+ self._items_cache = None
+
+ logger.debug(f"Cleared session {self.session_id}")
+
+ except httpx.HTTPStatusError as e:
+ if e.response.status_code == 404:
+ # Session doesn't exist, that's fine
+ self._items_cache = None
+ else:
+ raise
+
+
+class KAgentSessionFactory:
+ """Factory for creating KAgent sessions.
+
+ This factory manages the HTTP client and configuration needed to create
+ KAgentSession instances that communicate with the KAgent backend.
+ """
+
+ def __init__(
+ self,
+ client: httpx.AsyncClient,
+ app_name: str,
+ default_user_id: str = "admin@kagent.dev",
+ ):
+ """Initialize the session factory.
+
+ Args:
+ client: HTTP client for making API calls to KAgent
+ app_name: Application name for session tracking
+ default_user_id: Default user ID if not specified per session
+ """
+ self.client = client
+ self.app_name = app_name
+ self.default_user_id = default_user_id
+
+ def create_session(
+ self,
+ session_id: str,
+ user_id: str | None = None,
+ ) -> KAgentSession:
+ """Create a new session instance.
+
+ Args:
+ session_id: Unique identifier for the session
+ user_id: Optional user ID (uses default if not provided)
+
+ Returns:
+ A new KAgentSession instance
+ """
+ return KAgentSession(
+ session_id=session_id,
+ client=self.client,
+ app_name=self.app_name,
+ user_id=user_id or self.default_user_id,
+ )
diff --git a/python/packages/kagent-openai/src/kagent/openai/tools/__init__.py b/python/packages/kagent-openai/src/kagent/openai/tools/__init__.py
new file mode 100644
index 000000000..c9363e0d2
--- /dev/null
+++ b/python/packages/kagent-openai/src/kagent/openai/tools/__init__.py
@@ -0,0 +1,3 @@
+from ._tools import bash, edit_file, get_skill_tool, get_skill_tools, read_file, write_file
+
+__all__ = ["edit_file", "write_file", "read_file", "bash", "get_skill_tool", "get_skill_tools"]
diff --git a/python/packages/kagent-openai/src/kagent/openai/tools/_tools.py b/python/packages/kagent-openai/src/kagent/openai/tools/_tools.py
new file mode 100644
index 000000000..67140388d
--- /dev/null
+++ b/python/packages/kagent-openai/src/kagent/openai/tools/_tools.py
@@ -0,0 +1,182 @@
+"""File operation and skill tools for agents.
+
+This module provides Read, Write, Edit, Bash, and Skills tools that agents can use.
+These tools are wrappers around the centralized logic in the kagent-skills package.
+"""
+
+from __future__ import annotations
+
+import logging
+from pathlib import Path
+
+from agents.exceptions import UserError
+from agents.run_context import RunContextWrapper
+from agents.tool import FunctionTool, function_tool
+
+from kagent.skills import (
+ discover_skills,
+ edit_file_content,
+ execute_command,
+ generate_skills_tool_description,
+ get_bash_description,
+ get_edit_file_description,
+ get_read_file_description,
+ get_session_path,
+ get_write_file_description,
+ initialize_session_path,
+ load_skill_content,
+ read_file_content,
+ write_file_content,
+)
+
+from .._agent_executor import SessionContext
+
+logger = logging.getLogger(__name__)
+
+
+# --- System Tools ---
+
+
+@function_tool(
+ name_override="read_file",
+ description_override=get_read_file_description(),
+)
+def read_file(
+ wrapper: RunContextWrapper[SessionContext],
+ file_path: str,
+ offset: int | None = None,
+ limit: int | None = None,
+) -> str:
+ """Read a file from the filesystem."""
+ try:
+ session_id = wrapper.context.session_id
+ working_dir = get_session_path(session_id)
+ path = Path(file_path)
+ if not path.is_absolute():
+ path = working_dir / path
+
+ return read_file_content(path, offset, limit)
+ except (FileNotFoundError, IsADirectoryError, OSError) as e:
+ raise UserError(str(e)) from e
+
+
+@function_tool(
+ name_override="write_file",
+ description_override=get_write_file_description(),
+)
+def write_file(wrapper: RunContextWrapper[SessionContext], file_path: str, content: str) -> str:
+ """Write content to a file."""
+ try:
+ session_id = wrapper.context.session_id
+ working_dir = get_session_path(session_id)
+ path = Path(file_path)
+ if not path.is_absolute():
+ path = working_dir / path
+
+ return write_file_content(path, content)
+ except OSError as e:
+ raise UserError(str(e)) from e
+
+
+@function_tool(
+ name_override="edit_file",
+ description_override=get_edit_file_description(),
+)
+def edit_file(
+ wrapper: RunContextWrapper[SessionContext],
+ file_path: str,
+ old_string: str,
+ new_string: str,
+ replace_all: bool = False,
+) -> str:
+ """Edit a file by replacing old_string with new_string."""
+ try:
+ session_id = wrapper.context.session_id
+ working_dir = get_session_path(session_id)
+ path = Path(file_path)
+ if not path.is_absolute():
+ path = working_dir / path
+
+ return edit_file_content(path, old_string, new_string, replace_all)
+ except (FileNotFoundError, IsADirectoryError, ValueError, OSError) as e:
+ raise UserError(str(e)) from e
+
+
+@function_tool(
+ name_override="bash",
+ description_override=get_bash_description(),
+)
+async def bash(wrapper: RunContextWrapper[SessionContext], command: str) -> str:
+ """Executes a bash command in a sandboxed environment."""
+ try:
+ session_id = wrapper.context.session_id
+ working_dir = get_session_path(session_id)
+ return await execute_command(command, working_dir)
+ except Exception as e:
+ raise UserError(f"Error executing command: {e}") from e
+
+
+# --- Skill Tools ---
+
+
+def get_skill_tool(skills_directory: str | Path = "/skills") -> FunctionTool:
+ """Create a Skill tool.
+
+ This function generates a tool instance with skills discovered from the provided
+ directory, following the ADK pattern.
+ """
+ skills_dir = Path(skills_directory)
+ if not skills_dir.exists():
+ raise ValueError(f"Skills directory does not exist: {skills_dir}")
+
+ # Discover skills and generate the tool description.
+ skills = discover_skills(skills_dir)
+ description = generate_skills_tool_description(skills)
+
+ @function_tool(name_override="skills", description_override=description)
+ def skill_tool_impl(wrapper: RunContextWrapper[SessionContext], command: str) -> str:
+ """Execute a skill by name.
+
+ Args:
+ command: The name of the skill to execute (e.g., "data-analysis")
+
+ Returns:
+ The full skill instructions and context.
+ """
+ # This function is cached internally, so calling it multiple times is safe.
+ initialize_session_path(wrapper.context.session_id, str(skills_dir))
+ skill_name = command.strip()
+
+ try:
+ content = load_skill_content(skills_dir, skill_name)
+
+ # Mimic ADK's formatting
+ header = (
+ f'The "{skill_name}" skill is loading\n\n'
+ f"Base directory for this skill: {skills_dir.resolve()}/{skill_name}\n\n"
+ )
+ footer = (
+ "\n\n---\n"
+ "The skill has been loaded. Follow the instructions above and use the bash tool to execute commands."
+ )
+ return header + content + footer
+
+ except (FileNotFoundError, OSError) as e:
+ return f"Error loading skill '{skill_name}': {e}"
+ except Exception as e:
+ return f"An unexpected error occurred while loading skill '{skill_name}': {e}"
+
+ return skill_tool_impl
+
+
+def get_skill_tools(skills_directory: str | Path = "/skills") -> list[FunctionTool]:
+ """
+ Create a list of tools including the skill tool and file operation tools.
+
+ Args:
+ skills_directory: Path to the directory containing skills.
+
+ Returns:
+ A list of FunctionTool instances: skills tool, read_file, write_file, edit_file
+ """
+ return [get_skill_tool(skills_directory), read_file, write_file, edit_file, bash]
diff --git a/python/packages/kagent-skills/.python-version b/python/packages/kagent-skills/.python-version
new file mode 100644
index 000000000..976544ccb
--- /dev/null
+++ b/python/packages/kagent-skills/.python-version
@@ -0,0 +1 @@
+3.13.7
diff --git a/python/packages/kagent-skills/README.md b/python/packages/kagent-skills/README.md
new file mode 100644
index 000000000..776710c29
--- /dev/null
+++ b/python/packages/kagent-skills/README.md
@@ -0,0 +1,5 @@
+# KAgent Skills
+
+Core library for discovering, parsing, and loading KAgent skills from the filesystem.
+
+For example usage, see `kagent-adk` and `kagent-openai` packages.
diff --git a/python/packages/kagent-skills/pyproject.toml b/python/packages/kagent-skills/pyproject.toml
new file mode 100644
index 000000000..0eccd09c0
--- /dev/null
+++ b/python/packages/kagent-skills/pyproject.toml
@@ -0,0 +1,35 @@
+[project]
+name = "kagent-skills"
+version = "0.1.0"
+description = "Core library for discovering and loading KAgent skills."
+readme = "README.md"
+requires-python = ">=3.13"
+dependencies = [
+ "pydantic>=2.0.0",
+ "pyyaml>=6.0"
+]
+
+[project.optional-dependencies]
+dev = [
+ "pytest>=7.0.0",
+ "pytest-asyncio>=0.21.0",
+ "black>=23.0.0",
+ "ruff>=0.1.0",
+]
+
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.hatch.build.targets.wheel]
+packages = ["src/kagent"]
+
+[tool.black]
+line-length = 120
+target-version = ['py313']
+
+[tool.ruff]
+line-length = 120
+target-version = "py313"
+lint.select = ["E", "F", "I", "N", "W", "UP"]
+lint.ignore = ["E501", "N803", "N806"]
diff --git a/python/packages/kagent-skills/src/kagent/skills/__init__.py b/python/packages/kagent-skills/src/kagent/skills/__init__.py
new file mode 100644
index 000000000..6e5e7b359
--- /dev/null
+++ b/python/packages/kagent-skills/src/kagent/skills/__init__.py
@@ -0,0 +1,38 @@
+from .discovery import discover_skills, load_skill_content
+from .models import Skill
+from .prompts import (
+ generate_skills_tool_description,
+ get_bash_description,
+ get_edit_file_description,
+ get_read_file_description,
+ get_write_file_description,
+)
+from .session import (
+ clear_session_cache,
+ get_session_path,
+ initialize_session_path,
+)
+from .shell import (
+ edit_file_content,
+ execute_command,
+ read_file_content,
+ write_file_content,
+)
+
+__all__ = [
+ "discover_skills",
+ "load_skill_content",
+ "Skill",
+ "read_file_content",
+ "write_file_content",
+ "edit_file_content",
+ "execute_command",
+ "generate_skills_tool_description",
+ "get_read_file_description",
+ "get_write_file_description",
+ "get_edit_file_description",
+ "get_bash_description",
+ "initialize_session_path",
+ "get_session_path",
+ "clear_session_cache",
+]
diff --git a/python/packages/kagent-skills/src/kagent/skills/discovery.py b/python/packages/kagent-skills/src/kagent/skills/discovery.py
new file mode 100644
index 000000000..646707357
--- /dev/null
+++ b/python/packages/kagent-skills/src/kagent/skills/discovery.py
@@ -0,0 +1,80 @@
+from __future__ import annotations
+
+import logging
+from pathlib import Path
+
+import yaml
+
+from .models import Skill
+
+logger = logging.getLogger(__name__)
+
+
+def parse_skill_metadata(skill_file: Path) -> dict[str, str] | None:
+ """Parse YAML frontmatter from a SKILL.md file."""
+ try:
+ with open(skill_file, encoding="utf-8") as f:
+ content = f.read()
+
+ if not content.startswith("---"):
+ return None
+
+ parts = content.split("---", 2)
+ if len(parts) < 3:
+ return None
+
+ metadata = yaml.safe_load(parts[1])
+ if isinstance(metadata, dict) and "name" in metadata and "description" in metadata:
+ return {
+ "name": metadata["name"],
+ "description": metadata["description"],
+ }
+ return None
+ except Exception as e:
+ logger.error(f"Failed to parse metadata from {skill_file}: {e}")
+ return None
+
+
+def discover_skills(skills_directory: Path) -> list[Skill]:
+ """Discover available skills and return their metadata."""
+ if not skills_directory.exists():
+ logger.warning(f"Skills directory not found: {skills_directory}")
+ return []
+
+ skills = []
+ for skill_dir in sorted(skills_directory.iterdir()):
+ if not skill_dir.is_dir():
+ continue
+
+ skill_file = skill_dir / "SKILL.md"
+ if not skill_file.exists():
+ continue
+
+ try:
+ metadata = parse_skill_metadata(skill_file)
+ if metadata:
+ skills.append(Skill(**metadata))
+ except Exception as e:
+ logger.error(f"Failed to parse skill {skill_dir.name}: {e}")
+
+ return skills
+
+
+def load_skill_content(skills_directory: Path, skill_name: str) -> str:
+ """Load and return the full content of a skill's SKILL.md file."""
+ # Find skill directory
+ skill_dir = skills_directory / skill_name
+ if not skill_dir.exists() or not skill_dir.is_dir():
+ raise FileNotFoundError(f"Skill '{skill_name}' not found in {skills_directory}")
+
+ skill_file = skill_dir / "SKILL.md"
+ if not skill_file.exists():
+ raise FileNotFoundError(f"Skill '{skill_name}' has no SKILL.md file in {skill_dir}")
+
+ try:
+ with open(skill_file, encoding="utf-8") as f:
+ content = f.read()
+ return content
+ except Exception as e:
+ logger.error(f"Failed to load skill {skill_name}: {e}")
+ raise OSError(f"Error loading skill '{skill_name}': {e}") from e
diff --git a/python/packages/kagent-skills/src/kagent/skills/models.py b/python/packages/kagent-skills/src/kagent/skills/models.py
new file mode 100644
index 000000000..9432011d6
--- /dev/null
+++ b/python/packages/kagent-skills/src/kagent/skills/models.py
@@ -0,0 +1,20 @@
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class Skill(BaseModel):
+ """Represents the metadata for a skill.
+
+ This is a simple data container used during the initial skill discovery
+ phase to hold the information parsed from a skill's SKILL.md frontmatter.
+ """
+
+ name: str
+ """The unique name/identifier of the skill."""
+
+ description: str
+ """A description of what the skill does and when to use it."""
+
+ license: str | None = None
+ """Optional license information for the skill."""
diff --git a/python/packages/kagent-skills/src/kagent/skills/prompts.py b/python/packages/kagent-skills/src/kagent/skills/prompts.py
new file mode 100644
index 000000000..5be19165a
--- /dev/null
+++ b/python/packages/kagent-skills/src/kagent/skills/prompts.py
@@ -0,0 +1,116 @@
+from .models import Skill
+
+
+def generate_skills_xml(skills: list[Skill]) -> str:
+ """Formats a list of skills into an XML block for tool descriptions."""
+ if not skills:
+        return "<available_skills>\n</available_skills>\n"
+
+ skills_entries = []
+ for skill in skills:
+        skill_xml = f"<skill>\n<name>{skill.name}</name>\n<description>{skill.description}</description>\n</skill>"
+ skills_entries.append(skill_xml)
+
+    return "<available_skills>\n" + "\n".join(skills_entries) + "\n</available_skills>"
+
+
+def generate_skills_tool_description(skills: list[Skill]) -> str:
+ """Generates the full, standardized description for the 'skills' tool."""
+ skills_xml = generate_skills_xml(skills)
+
+ # This description is based on the ADK version, which is the source of truth.
+ description = f"""Execute a skill within the main conversation
+
+
+When users ask you to perform tasks, check if any of the available skills below can help complete the task more effectively. Skills provide specialized capabilities and domain knowledge.
+
+How to use skills:
+- Invoke skills using this tool with the skill name only (no arguments)
+- When you invoke a skill, the skill's full SKILL.md will load with detailed instructions
+- Follow the skill's instructions and use the bash tool to execute commands
+- Examples:
+ - command: \"data-analysis\" - invoke the data-analysis skill
+ - command: \"pdf-processing\" - invoke the pdf-processing skill
+
+Important:
+- Only use skills listed in <available_skills> below
+- Do not invoke a skill that is already loaded in the conversation
+- After loading a skill, use the bash tool for execution
+- If not specified, scripts are located in the skill-name/scripts subdirectory
+
+
+{skills_xml}
+"""
+ return description
+
+
+def get_read_file_description() -> str:
+ """Returns the standardized description for the read_file tool."""
+ return """Reads a file from the filesystem with line numbers.
+
+Usage:
+- Provide a path to the file (absolute or relative to your working directory)
+- Returns content with line numbers (format: LINE_NUMBER|CONTENT)
+- Optional offset and limit parameters for reading specific line ranges
+- Lines longer than 2000 characters are truncated
+- Always read a file before editing it
+- You can read from skills/ directory, uploads/, outputs/, or any file in your session
+"""
+
+
+def get_write_file_description() -> str:
+ """Returns the standardized description for the write_file tool."""
+ return """Writes content to a file on the filesystem.
+
+Usage:
+- Provide a path (absolute or relative to working directory) and content to write
+- Overwrites existing files
+- Creates parent directories if needed
+- For existing files, read them first using read_file
+- Prefer editing existing files over writing new ones
+- You can write to your working directory, outputs/, or any writable location
+- Note: skills/ directory is read-only
+"""
+
+
+def get_edit_file_description() -> str:
+ """Returns the standardized description for the edit_file tool."""
+ return """Performs exact string replacements in files.
+
+Usage:
+- You must read the file first using read_file
+- Provide path (absolute or relative to working directory)
+- When editing, preserve exact indentation from the file content
+- Do NOT include line number prefixes in old_string or new_string
+- old_string must be unique unless replace_all=true
+- Use replace_all to rename variables/strings throughout the file
+- old_string and new_string must be different
+- Note: skills/ directory is read-only
+"""
+
+
+def get_bash_description() -> str:
+ """Returns the standardized description for the bash tool."""
+ # This combines the useful parts from both ADK and OpenAI descriptions
+ return """Execute bash commands in the skills environment with sandbox protection.
+
+Working Directory & Structure:
+- Commands run in a temporary session directory: /tmp/kagent/{session_id}/
+- /skills -> All skills are available here (read-only).
+- Your current working directory and /skills are added to PYTHONPATH.
+
+Python Imports (CRITICAL):
+- To import from a skill, use the name of the skill.
+    Example: from skill_name.module import function
+- If the skill name contains a dash '-', you need to use importlib to import it.
+ Example:
+ import importlib
+ skill_module = importlib.import_module('skill-name.module')
+
+For file operations:
+- Use read_file, write_file, and edit_file for interacting with the filesystem.
+
+Timeouts:
+- python scripts: 60s
+- other commands: 30s
+"""
diff --git a/python/packages/kagent-skills/src/kagent/skills/session.py b/python/packages/kagent-skills/src/kagent/skills/session.py
new file mode 100644
index 000000000..0c9b67f15
--- /dev/null
+++ b/python/packages/kagent-skills/src/kagent/skills/session.py
@@ -0,0 +1,97 @@
+"""Manages isolated filesystem paths for agent sessions."""
+
+import logging
+import tempfile
+from pathlib import Path
+
+logger = logging.getLogger(__name__)
+
+# Cache of initialized session paths to avoid re-creating symlinks
+_session_path_cache: dict[str, Path] = {}
+
+
+def initialize_session_path(session_id: str, skills_directory: str) -> Path:
+ """Initialize a session's working directory with skills symlink.
+
+ Creates the directory structure and symlink to the skills directory.
+
+ Directory structure:
+ /tmp/kagent/{session_id}/
+ ├── skills/ -> symlink to skills_directory (read-only shared skills)
+ ├── uploads/ -> staged user files (temporary)
+ └── outputs/ -> generated files for return
+
+ Args:
+ session_id: The unique ID of the current session.
+ skills_directory: Path to the shared skills directory.
+
+ Returns:
+ The resolved path to the session's root directory.
+ """
+ # Return cached path if already initialized
+ if session_id in _session_path_cache:
+ return _session_path_cache[session_id]
+
+ # Initialize new session path
+ base_path = Path(tempfile.gettempdir()) / "kagent"
+ session_path = base_path / session_id
+
+ # Create working directories
+ (session_path / "uploads").mkdir(parents=True, exist_ok=True)
+ (session_path / "outputs").mkdir(parents=True, exist_ok=True)
+
+ # Create symlink to skills directory
+ skills_mount = Path(skills_directory)
+ skills_link = session_path / "skills"
+ if skills_mount.exists() and not skills_link.exists():
+ try:
+ skills_link.symlink_to(skills_mount)
+ logger.debug(f"Created symlink: {skills_link} -> {skills_mount}")
+ except FileExistsError:
+ # Symlink already exists (race condition from concurrent session setup)
+ pass
+ except Exception as e:
+ # Log but don't fail - skills can still be accessed via absolute path
+ logger.warning(f"Failed to create skills symlink for session {session_id}: {e}")
+
+ # Cache and return
+ resolved_path = session_path.resolve()
+ _session_path_cache[session_id] = resolved_path
+ return resolved_path
+
+
+def get_session_path(session_id: str) -> Path:
+ """Get the working directory path for a session.
+
+ This function retrieves the cached session path. If the session hasn't been
+ initialized, it falls back to auto-initialization with default /skills directory.
+
+ Args:
+ session_id: The unique ID of the current session.
+
+ Returns:
+ The resolved path to the session's root directory.
+ """
+ # Return cached path if already initialized
+ if session_id in _session_path_cache:
+ return _session_path_cache[session_id]
+
+ # Fallback: auto-initialize with default /skills
+ logger.warning(
+ f"Session {session_id} not initialized. "
+ f"Auto-initializing with default /skills. "
+ f"For custom skills directories, ensure the executor performs initialization."
+ )
+ return initialize_session_path(session_id, "/skills")
+
+
+def clear_session_cache(session_id: str | None = None) -> None:
+ """Clear cached session path(s).
+
+ Args:
+ session_id: Specific session to clear. If None, clears all cached sessions.
+ """
+ if session_id:
+ _session_path_cache.pop(session_id, None)
+ else:
+ _session_path_cache.clear()
diff --git a/python/packages/kagent-skills/src/kagent/skills/shell.py b/python/packages/kagent-skills/src/kagent/skills/shell.py
new file mode 100644
index 000000000..dc5db1a26
--- /dev/null
+++ b/python/packages/kagent-skills/src/kagent/skills/shell.py
@@ -0,0 +1,175 @@
+"""Core, framework-agnostic logic for system tools (file and shell operations)."""
+
+from __future__ import annotations
+
+import asyncio
+import logging
+import os
+from pathlib import Path
+
+logger = logging.getLogger(__name__)
+
+
+# --- File Operation Tools ---
+
+
+def read_file_content(
+ file_path: Path,
+ offset: int | None = None,
+ limit: int | None = None,
+) -> str:
+ """Reads a file with line numbers, raising errors on failure."""
+ if not file_path.exists():
+ raise FileNotFoundError(f"File not found: {file_path}")
+
+ if not file_path.is_file():
+ raise IsADirectoryError(f"Path is not a file: {file_path}")
+
+ try:
+ lines = file_path.read_text(encoding="utf-8").splitlines()
+ except Exception as e:
+ raise OSError(f"Error reading file {file_path}: {e}") from e
+
+ start = (offset - 1) if offset and offset > 0 else 0
+ end = (start + limit) if limit else len(lines)
+
+ result_lines = []
+ for i, line in enumerate(lines[start:end], start=start + 1):
+ if len(line) > 2000:
+ line = line[:2000] + "..."
+ result_lines.append(f"{i:6d}|{line}")
+
+ if not result_lines:
+ return "File is empty."
+
+ return "\n".join(result_lines)
+
+
+def write_file_content(file_path: Path, content: str) -> str:
+ """Writes content to a file, creating parent directories if needed."""
+ try:
+ file_path.parent.mkdir(parents=True, exist_ok=True)
+ file_path.write_text(content, encoding="utf-8")
+ logger.info(f"Successfully wrote to {file_path}")
+ return f"Successfully wrote to {file_path}"
+ except Exception as e:
+ raise OSError(f"Error writing file {file_path}: {e}") from e
+
+
+def edit_file_content(
+ file_path: Path,
+ old_string: str,
+ new_string: str,
+ replace_all: bool = False,
+) -> str:
+ """Performs an exact string replacement in a file."""
+ if old_string == new_string:
+ raise ValueError("old_string and new_string must be different")
+
+ if not file_path.exists():
+ raise FileNotFoundError(f"File not found: {file_path}")
+
+ if not file_path.is_file():
+ raise IsADirectoryError(f"Path is not a file: {file_path}")
+
+ try:
+ content = file_path.read_text(encoding="utf-8")
+ except Exception as e:
+ raise OSError(f"Error reading file {file_path}: {e}") from e
+
+ if old_string not in content:
+ raise ValueError(f"old_string not found in {file_path}")
+
+ count = content.count(old_string)
+ if not replace_all and count > 1:
+ raise ValueError(
+ f"old_string appears {count} times in {file_path}. Provide more context or set replace_all=true."
+ )
+
+ if replace_all:
+ new_content = content.replace(old_string, new_string)
+ else:
+ new_content = content.replace(old_string, new_string, 1)
+
+ try:
+ file_path.write_text(new_content, encoding="utf-8")
+ logger.info(f"Successfully replaced {count} occurrence(s) in {file_path}")
+ return f"Successfully replaced {count} occurrence(s) in {file_path}"
+ except Exception as e:
+ raise OSError(f"Error writing file {file_path}: {e}") from e
+
+
+# --- Shell Operation Tools ---
+
+
+def _get_command_timeout_seconds(command: str) -> float:
+ """Determine appropriate timeout for a command."""
+ if "python " in command or "python3 " in command:
+ return 60.0 # 1 minute for python scripts
+ else:
+ return 30.0 # 30 seconds for other commands
+
+
+async def execute_command(
+ command: str,
+ working_dir: Path,
+) -> str:
+ """Executes a shell command in a sandboxed environment."""
+ timeout = _get_command_timeout_seconds(command)
+
+ env = os.environ.copy()
+ # Add skills directory and working directory to PYTHONPATH
+ pythonpath_additions = [str(working_dir), "/skills"]
+ if "PYTHONPATH" in env:
+ pythonpath_additions.append(env["PYTHONPATH"])
+ env["PYTHONPATH"] = ":".join(pythonpath_additions)
+
+ # If a separate venv for shell commands is specified, use its python and pip
+ # Otherwise the system python/pip will be used for backward compatibility
+ bash_venv_path = os.environ.get("BASH_VENV_PATH")
+ if bash_venv_path:
+ bash_venv_bin = os.path.join(bash_venv_path, "bin")
+ # Prepend bash venv to PATH so its python and pip are used
+ env["PATH"] = f"{bash_venv_bin}:{env.get('PATH', '')}"
+ env["VIRTUAL_ENV"] = bash_venv_path
+
+ sandboxed_command = f'srt "{command}"'
+
+ try:
+ process = await asyncio.create_subprocess_shell(
+ sandboxed_command,
+ stdout=asyncio.subprocess.PIPE,
+ stderr=asyncio.subprocess.PIPE,
+ cwd=working_dir,
+ env=env,
+ )
+
+ try:
+ stdout, stderr = await asyncio.wait_for(process.communicate(), timeout=timeout)
+ except asyncio.TimeoutError:
+ process.kill()
+ await process.wait()
+ return f"Error: Command timed out after {timeout}s"
+
+ stdout_str = stdout.decode("utf-8", errors="replace") if stdout else ""
+ stderr_str = stderr.decode("utf-8", errors="replace") if stderr else ""
+
+ if process.returncode != 0:
+ error_msg = f"Command failed with exit code {process.returncode}"
+ if stderr_str:
+ error_msg += f":\n{stderr_str}"
+ elif stdout_str:
+ error_msg += f":\n{stdout_str}"
+ return error_msg
+
+ output = stdout_str
+ if stderr_str and "WARNING" not in stderr_str:
+ output += f"\n{stderr_str}"
+
+ logger.info(f"Command executed successfully: {output}")
+
+ return output.strip() if output.strip() else "Command completed successfully."
+
+ except Exception as e:
+ logger.error(f"Error executing command: {e}")
+ return f"Error: {e}"
diff --git a/python/packages/kagent-skills/src/kagent/tests/unittests/test_skill_execution.py b/python/packages/kagent-skills/src/kagent/tests/unittests/test_skill_execution.py
new file mode 100644
index 000000000..54da83483
--- /dev/null
+++ b/python/packages/kagent-skills/src/kagent/tests/unittests/test_skill_execution.py
@@ -0,0 +1,142 @@
+import json
+import shutil
+import tempfile
+import textwrap
+from pathlib import Path
+
+import pytest
+
+from kagent.skills import (
+ discover_skills,
+ execute_command,
+ load_skill_content,
+ read_file_content,
+)
+
+
+@pytest.fixture
+def skill_test_env() -> Path:
+ """
+ Creates a temporary environment that mimics a real session and ensures cleanup.
+
+ This fixture manually creates and deletes the temporary directory structure
+ to guarantee that no files are left behind after the test run.
+ """
+ # 1. Create a single top-level temporary directory
+ top_level_dir = Path(tempfile.mkdtemp())
+
+ try:
+ session_dir = top_level_dir / "session"
+ skills_root_dir = top_level_dir / "skills_root"
+
+ # 2. Create session directories
+ (session_dir / "uploads").mkdir(parents=True, exist_ok=True)
+ (session_dir / "outputs").mkdir(parents=True, exist_ok=True)
+
+ # 3. Create the skill to be tested
+ skill_dir = skills_root_dir / "csv-to-json"
+ script_dir = skill_dir / "scripts"
+ script_dir.mkdir(parents=True, exist_ok=True)
+
+ # SKILL.md
+ (skill_dir / "SKILL.md").write_text(
+ textwrap.dedent("""\
+---
+ name: csv-to-json
+ description: Converts a CSV file to a JSON file.
+ ---
+ # CSV to JSON Conversion
+ Use the `convert.py` script to convert a CSV file from the `uploads` directory
+ to a JSON file in the `outputs` directory.
+ Example: `bash("python skills/csv-to-json/scripts/convert.py uploads/data.csv outputs/result.json")`
+ """)
+ )
+
+ # Python script for the skill
+ (script_dir / "convert.py").write_text(
+ textwrap.dedent("""
+ import csv
+ import json
+ import sys
+ if len(sys.argv) != 3:
+                print(f"Usage: python {sys.argv[0]} <input_csv> <output_json>")
+ sys.exit(1)
+ input_path, output_path = sys.argv[1], sys.argv[2]
+ try:
+ data = []
+ with open(input_path, 'r', encoding='utf-8') as f:
+ reader = csv.DictReader(f)
+ for row in reader:
+ data.append(row)
+ with open(output_path, 'w', encoding='utf-8') as f:
+ json.dump(data, f, indent=2)
+ print(f"Successfully converted {input_path} to {output_path}")
+ except FileNotFoundError:
+ print(f"Error: Input file not found at {input_path}")
+ sys.exit(1)
+ """)
+ )
+
+ # 4. Create a symlink from the session to the skills root
+ (session_dir / "skills").symlink_to(skills_root_dir, target_is_directory=True)
+
+ # 5. Yield the session directory path to the test
+ yield session_dir
+
+ finally:
+ # 6. Explicitly clean up the entire temporary directory
+ shutil.rmtree(top_level_dir)
+
+
+@pytest.mark.asyncio
+async def test_skill_core_logic(skill_test_env: Path):
+ """
+ Tests the core logic of the 'csv-to-json' skill by directly
+ calling the centralized tool functions.
+ """
+ session_dir = skill_test_env
+
+ # 1. "Upload" a file for the skill to process
+ input_csv_path = session_dir / "uploads" / "data.csv"
+ input_csv_path.write_text("id,name\n1,Alice\n2,Bob\n")
+
+ # 2. Execute the skill's core command, just as an agent would
+ # We use the centralized `execute_command` function directly
+ command = "python skills/csv-to-json/scripts/convert.py uploads/data.csv outputs/result.json"
+ result = await execute_command(command, working_dir=session_dir)
+
+ assert "Successfully converted" in result
+
+ # 3. Verify the output by reading the generated file
+ # We use the centralized `read_file_content` function directly
+ output_json_path = session_dir / "outputs" / "result.json"
+
+ # The read_file_content function returns a string with line numbers,
+ # so we need to parse it.
+ raw_output = read_file_content(output_json_path)
+ json_content_str = "\n".join(line.split("|", 1)[1] for line in raw_output.splitlines())
+
+ # Assert the content is correct
+ expected_data = [{"id": "1", "name": "Alice"}, {"id": "2", "name": "Bob"}]
+ assert json.loads(json_content_str) == expected_data
+
+
+def test_skill_discovery_and_loading(skill_test_env: Path):
+ """
+ Tests the core logic of discovering a skill and loading its instructions.
+ """
+ # The fixture creates the session dir, the skills are one level up in a separate dir
+ skills_root_dir = skill_test_env.parent / "skills_root"
+
+ # 1. Test skill discovery
+ discovered = discover_skills(skills_root_dir)
+ assert len(discovered) == 1
+ skill_meta = discovered[0]
+ assert skill_meta.name == "csv-to-json"
+ assert "Converts a CSV file" in skill_meta.description
+
+ # 2. Test skill content loading
+ skill_content = load_skill_content(skills_root_dir, "csv-to-json")
+ assert "name: csv-to-json" in skill_content
+ assert "# CSV to JSON Conversion" in skill_content
+ assert 'Example: `bash("python skills/csv-to-json/scripts/convert.py' in skill_content
diff --git a/python/pyproject.toml b/python/pyproject.toml
index 3ced52033..6dd299bec 100644
--- a/python/pyproject.toml
+++ b/python/pyproject.toml
@@ -1,5 +1,5 @@
[tool.uv.workspace]
-members = ["packages/*", "samples/adk/*", "samples/langgraph/*", "samples/crewai/*"]
+members = ["packages/*", "samples/adk/*", "samples/langgraph/*", "samples/crewai/*", "samples/openai/*"]
[dependency-groups]
dev = [
diff --git a/python/samples/crewai/poem_flow/Dockerfile b/python/samples/crewai/poem_flow/Dockerfile
index c25697dab..0bca43887 100644
--- a/python/samples/crewai/poem_flow/Dockerfile
+++ b/python/samples/crewai/poem_flow/Dockerfile
@@ -20,7 +20,7 @@ COPY .python-version .python-version
COPY uv.lock uv.lock
# Install dependencies
-RUN uv venv && uv sync --locked --no-dev --package poem-flow \
+RUN uv venv && uv sync --no-dev --package poem-flow \
&& uv cache clean
# Set environment variables
diff --git a/python/samples/crewai/poem_flow/pyproject.toml b/python/samples/crewai/poem_flow/pyproject.toml
index d681717f7..9d75a80ab 100644
--- a/python/samples/crewai/poem_flow/pyproject.toml
+++ b/python/samples/crewai/poem_flow/pyproject.toml
@@ -6,7 +6,6 @@ readme = "README.md"
requires-python = ">=3.12"
dependencies = [
"kagent-crewai",
- "crewai[tools]>=0.193.2,<1.0.0"
]
[build-system]
diff --git a/python/samples/crewai/research-crew/pyproject.toml b/python/samples/crewai/research-crew/pyproject.toml
index 4c38d8a8f..6eb2e707f 100644
--- a/python/samples/crewai/research-crew/pyproject.toml
+++ b/python/samples/crewai/research-crew/pyproject.toml
@@ -6,7 +6,6 @@ readme = "README.md"
requires-python = ">=3.12"
dependencies = [
"kagent-crewai",
- "crewai[tools]>=0.193.2,<1.0.0"
]
[build-system]
diff --git a/python/samples/openai/basic_agent/.gitignore b/python/samples/openai/basic_agent/.gitignore
new file mode 100644
index 000000000..fbb800b59
--- /dev/null
+++ b/python/samples/openai/basic_agent/.gitignore
@@ -0,0 +1,45 @@
+# Python
+__pycache__/
+*.py[cod]
+*$py.class
+*.so
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+
+# Virtual environments
+.venv/
+venv/
+ENV/
+env/
+
+# Environment variables
+.env
+
+# IDE
+.vscode/
+.idea/
+*.swp
+*.swo
+*~
+
+# OS
+.DS_Store
+Thumbs.db
+
+# Logs
+*.log
+
diff --git a/python/samples/openai/basic_agent/Dockerfile b/python/samples/openai/basic_agent/Dockerfile
new file mode 100644
index 000000000..4f9216593
--- /dev/null
+++ b/python/samples/openai/basic_agent/Dockerfile
@@ -0,0 +1,34 @@
+### STAGE 1: base image
+FROM ghcr.io/astral-sh/uv:python3.13-trixie-slim
+
+WORKDIR /app
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+    build-essential curl \
+ && rm -rf /var/lib/apt/lists/*
+
+# Copy project files
+COPY pyproject.toml pyproject.toml
+COPY packages packages
+COPY samples/openai/basic_agent/ ./samples/openai/basic_agent/
+COPY README.md README.md
+COPY .python-version .python-version
+COPY uv.lock uv.lock
+
+# Install dependencies
+RUN uv venv && uv sync --no-dev --package basic-openai-agent
+
+# Expose port
+EXPOSE 8080
+ENV PORT=8080
+ENV VIRTUAL_ENV=/app/.venv
+ENV PATH="/app/.venv/bin:$PATH"
+
+# Health check
+HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
+ CMD curl -f http://localhost:8080/health || exit 1
+
+# Run the agent
+CMD ["python", "samples/openai/basic_agent/basic_agent/agent.py"]
+
diff --git a/python/packages/kagent-adk/tests/unittests/__init__.py b/python/samples/openai/basic_agent/README.md
similarity index 100%
rename from python/packages/kagent-adk/tests/unittests/__init__.py
rename to python/samples/openai/basic_agent/README.md
diff --git a/python/samples/openai/basic_agent/agent.yaml b/python/samples/openai/basic_agent/agent.yaml
new file mode 100644
index 000000000..24c02b0d6
--- /dev/null
+++ b/python/samples/openai/basic_agent/agent.yaml
@@ -0,0 +1,16 @@
+apiVersion: kagent.dev/v1alpha2
+kind: Agent
+metadata:
+ name: basic-openai-agent
+spec:
+ description: A basic OpenAI agent with calculator and weather tools
+ type: BYO
+ byo:
+ deployment:
+ image: localhost:5001/basic-openai:latest
+ env:
+ - name: OPENAI_API_KEY
+ valueFrom:
+ secretKeyRef:
+ name: kagent-openai
+ key: OPENAI_API_KEY
diff --git a/python/samples/openai/basic_agent/basic_agent/__init__.py b/python/samples/openai/basic_agent/basic_agent/__init__.py
new file mode 100644
index 000000000..f848aef28
--- /dev/null
+++ b/python/samples/openai/basic_agent/basic_agent/__init__.py
@@ -0,0 +1,5 @@
+"""Basic OpenAI Agent sample for KAgent."""
+
+from .agent import app
+
+__all__ = ["app"]
diff --git a/python/samples/openai/basic_agent/basic_agent/agent-card.json b/python/samples/openai/basic_agent/basic_agent/agent-card.json
new file mode 100644
index 000000000..da1c46f48
--- /dev/null
+++ b/python/samples/openai/basic_agent/basic_agent/agent-card.json
@@ -0,0 +1,20 @@
+{
+ "name": "basic-openai-agent",
+ "description": "A basic OpenAI agent with calculator and weather tools",
+ "url": "localhost:8000",
+ "version": "0.1.0",
+ "capabilities": {
+ "streaming": true
+ },
+ "defaultInputModes": ["text"],
+ "defaultOutputModes": ["text"],
+ "skills": [
+ {
+ "id": "basic",
+ "name": "Basic Assistant",
+ "description": "Can perform calculations and get weather information",
+ "tags": ["calculator", "weather", "assistant"]
+ }
+ ]
+}
+
diff --git a/python/samples/openai/basic_agent/basic_agent/agent.py b/python/samples/openai/basic_agent/basic_agent/agent.py
new file mode 100644
index 000000000..3148af3e5
--- /dev/null
+++ b/python/samples/openai/basic_agent/basic_agent/agent.py
@@ -0,0 +1,119 @@
+"""Basic OpenAI Agent with KAgent integration.
+
+This sample demonstrates how to create a simple OpenAI agent that can:
+- Answer questions
+- Use tools (calculate, get weather)
+- Use skills from the skills directory
+- Maintain conversation history via sessions
+"""
+
+import logging
+from pathlib import Path
+
+from a2a.types import AgentCard
+from agents.agent import Agent
+from agents.tool import function_tool
+from kagent.core import KAgentConfig
+from kagent.openai import KAgentApp
+
+logger = logging.getLogger(__name__)
+
+SKILLS_DIR = Path(__file__).parent.parent / "skills"
+
+
+# Define tools for the agent
+@function_tool
+def calculate(expression: str) -> str:
+ """Evaluate a mathematical expression and return the result.
+
+ Args:
+ expression: A mathematical expression to evaluate (e.g., "2 + 2", "10 * 5")
+
+ Returns:
+ The result of the calculation as a string
+ """
+ try:
+ # Safe evaluation of basic math expressions
+ # Note: In production, use a proper math expression parser
+ result = eval(expression, {"__builtins__": {}}, {})
+ return f"The result of {expression} is {result}"
+ except Exception as e:
+ return f"Error calculating {expression}: {str(e)}"
+
+
+@function_tool
+def get_weather(location: str) -> str:
+ """Get the current weather for a location.
+
+ Args:
+ location: The city or location to get weather for
+
+ Returns:
+ Weather information for the location
+ """
+ # Simulated weather data
+ weather_data = {
+ "san francisco": "Sunny, 68°F",
+ "new york": "Cloudy, 45°F",
+ "london": "Rainy, 52°F",
+ "tokyo": "Clear, 61°F",
+ }
+
+ location_lower = location.lower()
+ if location_lower in weather_data:
+ return f"The weather in {location} is {weather_data[location_lower]}"
+ else:
+ return f"Weather data not available for {location}. Available cities: {', '.join(weather_data.keys())}"
+
+
+tools = [calculate, get_weather]
+
+# Create the OpenAI agent
+agent = Agent(
+ name="BasicAssistant",
+ instructions="""You are a helpful assistant that can use tools and skills to solve problems.""",
+ tools=tools,
+)
+
+
+# Agent card for A2A protocol
+agent_card = AgentCard(
+ name="basic-openai-agent",
+ description="A basic OpenAI agent with calculator and weather tools",
+ url="localhost:8000",
+ version="0.1.0",
+ capabilities={"streaming": True},
+ defaultInputModes=["text"],
+ defaultOutputModes=["text"],
+ skills=[
+ {
+ "id": "basic",
+ "name": "Basic Assistant",
+ "description": "Can perform calculations and get weather information",
+ "tags": ["calculator", "weather", "assistant"],
+ }
+ ],
+)
+
+config = KAgentConfig()
+
+# Create KAgent app
+app = KAgentApp(
+ agent=agent,
+ agent_card=agent_card,
+ config=config,
+)
+
+
+# Build the FastAPI application
+fastapi_app = app.build()
+
+
+if __name__ == "__main__":
+ import uvicorn
+
+ logging.basicConfig(level=logging.INFO)
+ logger.info("Starting Basic OpenAI Agent...")
+ logger.info("Server will be available at http://0.0.0.0:8080")
+
+ uvicorn.run(fastapi_app, host="0.0.0.0", port=8080)
diff --git a/python/samples/openai/basic_agent/pyproject.toml b/python/samples/openai/basic_agent/pyproject.toml
new file mode 100644
index 000000000..1ebff1054
--- /dev/null
+++ b/python/samples/openai/basic_agent/pyproject.toml
@@ -0,0 +1,22 @@
+[project]
+name = "basic-openai-agent"
+version = "0.1.0"
+description = "Basic OpenAI Agent sample for KAgent"
+readme = "README.md"
+requires-python = ">=3.13"
+dependencies = [
+ "kagent-openai",
+ "openai-agents>=0.4.0",
+ "uvicorn>=0.20.0",
+]
+
+[tool.uv.sources]
+kagent-openai = { workspace = true }
+
+[build-system]
+requires = ["hatchling"]
+build-backend = "hatchling.build"
+
+[tool.hatch.build.targets.wheel]
+packages = ["basic_agent"]
+
diff --git a/python/uv.lock b/python/uv.lock
index 943e35e96..f72803e8b 100644
--- a/python/uv.lock
+++ b/python/uv.lock
@@ -9,11 +9,14 @@ resolution-markers = [
[manifest]
members = [
"basic",
+ "basic-openai-agent",
"currency",
"kagent-adk",
"kagent-core",
"kagent-crewai",
"kagent-langgraph",
+ "kagent-openai",
+ "kagent-skills",
"poem-flow",
"research-crew",
]
@@ -247,15 +250,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/7c/3c/0464dcada90d5da0e71018c04a140ad6349558afb30b3051b4264cc5b965/asgiref-3.9.1-py3-none-any.whl", hash = "sha256:f3bba7092a48005b5f5bacd747d36ee4a5a61f4a269a6df590b43144355ebd2c", size = 23790, upload-time = "2025-07-08T09:07:41.548Z" },
]
-[[package]]
-name = "asttokens"
-version = "3.0.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/4a/e7/82da0a03e7ba5141f05cce0d302e6eed121ae055e0456ca228bf693984bc/asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7", size = 61978, upload-time = "2024-11-30T04:30:14.439Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/25/8a/c46dcc25341b5bce5472c718902eb3d38600a903b14fa6aeecef3f21a46f/asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2", size = 26918, upload-time = "2024-11-30T04:30:10.946Z" },
-]
-
[[package]]
name = "attrs"
version = "25.3.0"
@@ -297,6 +291,23 @@ dependencies = [
[package.metadata]
requires-dist = [{ name = "google-adk", specifier = ">=1.8.0" }]
+[[package]]
+name = "basic-openai-agent"
+version = "0.1.0"
+source = { editable = "samples/openai/basic_agent" }
+dependencies = [
+ { name = "kagent-openai" },
+ { name = "openai-agents" },
+ { name = "uvicorn" },
+]
+
+[package.metadata]
+requires-dist = [
+ { name = "kagent-openai", editable = "packages/kagent-openai" },
+ { name = "openai-agents", specifier = ">=0.4.0" },
+ { name = "uvicorn", specifier = ">=0.20.0" },
+]
+
[[package]]
name = "bcrypt"
version = "5.0.0"
@@ -396,32 +407,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/09/71/54e999902aed72baf26bca0d50781b01838251a462612966e9fc4891eadd/black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717", size = 207646, upload-time = "2025-01-29T04:15:38.082Z" },
]
-[[package]]
-name = "blinker"
-version = "1.9.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/21/28/9b3f50ce0e048515135495f198351908d99540d69bfdc8c1d15b73dc55ce/blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf", size = 22460, upload-time = "2024-11-08T17:25:47.436Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc", size = 8458, upload-time = "2024-11-08T17:25:46.184Z" },
-]
-
-[[package]]
-name = "browserbase"
-version = "1.4.0"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "anyio" },
- { name = "distro" },
- { name = "httpx" },
- { name = "pydantic" },
- { name = "sniffio" },
- { name = "typing-extensions" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/71/df/17ac5e1116ab8f1264c6a9718f935358d20bdcd8ae0e3d1f18fd580cd871/browserbase-1.4.0.tar.gz", hash = "sha256:e2ed36f513c8630b94b826042c4bb9f497c333f3bd28e5b76cb708c65b4318a0", size = 122103, upload-time = "2025-05-16T20:50:40.802Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/58/13/4191423982a2ec69dc8c10a1c4b94a50a0861f49be81ffc19621b75841bc/browserbase-1.4.0-py3-none-any.whl", hash = "sha256:ea9f1fb4a88921975b8b9606835c441a59d8ce82ce00313a6d48bbe8e30f79fb", size = 98044, upload-time = "2025-05-16T20:50:39.331Z" },
-]
-
[[package]]
name = "build"
version = "1.3.0"
@@ -476,6 +461,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009, upload-time = "2024-09-04T20:44:45.309Z" },
]
+[[package]]
+name = "cfgv"
+version = "3.5.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/4e/b5/721b8799b04bf9afe054a3899c6cf4e880fcf8563cc71c15610242490a0c/cfgv-3.5.0.tar.gz", hash = "sha256:d5b1034354820651caa73ede66a6294d6e95c1b00acc5e9b098e917404669132", size = 7334, upload-time = "2025-11-19T20:55:51.612Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/db/3c/33bac158f8ab7f89b2e59426d5fe2e4f63f7ed25df84c036890172b412b5/cfgv-3.5.0-py2.py3-none-any.whl", hash = "sha256:a8dc6b26ad22ff227d2634a65cb388215ce6cc96bbcc5cfde7641ae87e8dacc0", size = 7445, upload-time = "2025-11-19T20:55:50.744Z" },
+]
+
[[package]]
name = "charset-normalizer"
version = "3.4.2"
@@ -585,18 +579,17 @@ wheels = [
[[package]]
name = "crewai"
-version = "0.201.1"
+version = "1.6.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "appdirs" },
- { name = "blinker" },
{ name = "chromadb" },
{ name = "click" },
{ name = "instructor" },
{ name = "json-repair" },
{ name = "json5" },
{ name = "jsonref" },
- { name = "litellm" },
+ { name = "mcp" },
{ name = "openai" },
{ name = "openpyxl" },
{ name = "opentelemetry-api" },
@@ -608,16 +601,15 @@ dependencies = [
{ name = "pydantic-settings" },
{ name = "pyjwt" },
{ name = "python-dotenv" },
- { name = "pyvis" },
{ name = "regex" },
{ name = "tokenizers" },
{ name = "tomli" },
{ name = "tomli-w" },
{ name = "uv" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/c6/85/fee06c332662b025762b89431f232b564a8b078ccd9eb935f0d2ed264eb9/crewai-0.201.1.tar.gz", hash = "sha256:8ed336a7c31c8eb2beb312a94e31c6b8ca54dc5178a76413bfcb5707eb5481c6", size = 6596906, upload-time = "2025-09-26T16:57:53.713Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/1e/c4/37f5e8e0ccb2804a3e2acc0ccf58f82dc9415a08fad71a3868cdf830c669/crewai-1.6.1.tar.gz", hash = "sha256:b7d73a8a333abf71b30ab20c54086004cd0c016dfd86bba9c035ad5eb31e22a7", size = 4177912, upload-time = "2025-11-29T01:58:25.573Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/e2/5e/1f9696284c3d5af770b9ea3bfa5ce096d08a94cdc999f9182ca33d5ac888/crewai-0.201.1-py3-none-any.whl", hash = "sha256:798cb882da1d113b0322a574b9ae4b893821fd42a952f9ebcb239d66a68ee5de", size = 472588, upload-time = "2025-09-26T16:57:51.671Z" },
+ { url = "https://files.pythonhosted.org/packages/06/87/8ab9924b79025165ed7f1b04a90f9b80137d18ceae9b8e34445a8495320c/crewai-1.6.1-py3-none-any.whl", hash = "sha256:8cec403ab89183bda28b830c722b6bc22457a2151a6aa46f07730e6fe7ab2723", size = 642861, upload-time = "2025-11-29T01:58:23.232Z" },
]
[package.optional-dependencies]
@@ -627,24 +619,23 @@ tools = [
[[package]]
name = "crewai-tools"
-version = "0.75.0"
+version = "1.6.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "beautifulsoup4" },
{ name = "crewai" },
{ name = "docker" },
{ name = "lancedb" },
- { name = "pypdf" },
+ { name = "pymupdf" },
{ name = "python-docx" },
{ name = "pytube" },
{ name = "requests" },
- { name = "stagehand" },
{ name = "tiktoken" },
{ name = "youtube-transcript-api" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/98/16/c897ed771235c6da4a6bc0c6f0baa12edc2b42d2a300447cd84588363f71/crewai_tools-0.75.0.tar.gz", hash = "sha256:9fffc498c93d35b7d19064caa74c6c8293001018f5aa144fb95f8168f2e75f49", size = 1134823, upload-time = "2025-09-26T18:19:39.252Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/f4/e2/039d47c1d5266a807c9f4f8d4b927fab1ebfb60989ec6b65fcd88070a510/crewai_tools-1.6.1.tar.gz", hash = "sha256:8724400b85b0a97de09fe681b1d0bf4334e3e68bcf5ede8a056e2beed0227907", size = 805758, upload-time = "2025-11-29T01:58:29.613Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/90/39/a5b64857e7f862ff7736d4662f9ec46ae87be024f9d0460a328f5be3fd6f/crewai_tools-0.75.0-py3-none-any.whl", hash = "sha256:92abf7c0ca2650ff10318b553b513e97029e199a10f11ac575a5f9836d495408", size = 739639, upload-time = "2025-09-26T18:19:36.271Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/d1/38d0bc01712cf086cd80fe778ed15ff9b57dc5cf4c0b11f1f40d88d4b50a/crewai_tools-1.6.1-py3-none-any.whl", hash = "sha256:62996840db257af9c471f9f4191b4268cb4dd7006b381670b968ac55973e08af", size = 764950, upload-time = "2025-11-29T01:58:27.441Z" },
]
[[package]]
@@ -705,15 +696,6 @@ requires-dist = [
{ name = "langsmith", extras = ["otel"], specifier = ">=0.4.30" },
]
-[[package]]
-name = "decorator"
-version = "5.2.1"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711, upload-time = "2025-02-24T04:41:34.073Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" },
-]
-
[[package]]
name = "defusedxml"
version = "0.7.1"
@@ -744,6 +726,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/3f/27/4570e78fc0bf5ea0ca45eb1de3818a23787af9b390c0b0a0033a1b8236f9/diskcache-5.6.3-py3-none-any.whl", hash = "sha256:5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19", size = 45550, upload-time = "2023-08-31T06:11:58.822Z" },
]
+[[package]]
+name = "distlib"
+version = "0.4.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605, upload-time = "2025-07-17T16:52:00.465Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" },
+]
+
[[package]]
name = "distro"
version = "1.9.0"
@@ -794,15 +785,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/c1/8b/5fe2cc11fee489817272089c4203e679c63b570a5aaeb18d852ae3cbba6a/et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa", size = 18059, upload-time = "2024-10-25T17:25:39.051Z" },
]
-[[package]]
-name = "executing"
-version = "2.2.1"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/cc/28/c14e053b6762b1044f34a13aab6859bbf40456d37d23aa286ac24cfd9a5d/executing-2.2.1.tar.gz", hash = "sha256:3632cc370565f6648cc328b32435bd120a1e4ebb20c77e3fdde9a13cd1e533c4", size = 1129488, upload-time = "2025-09-01T09:48:10.866Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/c1/ea/53f2148663b321f21b5a606bd5f191517cf40b7072c0497d3c92c4a13b1e/executing-2.2.1-py2.py3-none-any.whl", hash = "sha256:760643d3452b4d777d295bb167ccc74c64a81df23fb5e08eff250c425a4b2017", size = 28317, upload-time = "2025-09-01T09:48:08.5Z" },
-]
-
[[package]]
name = "fastapi"
version = "0.123.10"
@@ -1420,7 +1402,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b1/cf/f5c0b23309070ae93de75c90d29300751a5aacefc0a3ed1b1d8edb28f08b/greenlet-3.2.3-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:500b8689aa9dd1ab26872a34084503aeddefcb438e2e7317b89b11eaea1901ad", size = 270732, upload-time = "2025-06-05T16:10:08.26Z" },
{ url = "https://files.pythonhosted.org/packages/48/ae/91a957ba60482d3fecf9be49bc3948f341d706b52ddb9d83a70d42abd498/greenlet-3.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a07d3472c2a93117af3b0136f246b2833fdc0b542d4a9799ae5f41c28323faef", size = 639033, upload-time = "2025-06-05T16:38:53.983Z" },
{ url = "https://files.pythonhosted.org/packages/6f/df/20ffa66dd5a7a7beffa6451bdb7400d66251374ab40b99981478c69a67a8/greenlet-3.2.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:8704b3768d2f51150626962f4b9a9e4a17d2e37c8a8d9867bbd9fa4eb938d3b3", size = 652999, upload-time = "2025-06-05T16:41:37.89Z" },
- { url = "https://files.pythonhosted.org/packages/51/b4/ebb2c8cb41e521f1d72bf0465f2f9a2fd803f674a88db228887e6847077e/greenlet-3.2.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5035d77a27b7c62db6cf41cf786cfe2242644a7a337a0e155c80960598baab95", size = 647368, upload-time = "2025-06-05T16:48:21.467Z" },
{ url = "https://files.pythonhosted.org/packages/8e/6a/1e1b5aa10dced4ae876a322155705257748108b7fd2e4fae3f2a091fe81a/greenlet-3.2.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2d8aa5423cd4a396792f6d4580f88bdc6efcb9205891c9d40d20f6e670992efb", size = 650037, upload-time = "2025-06-05T16:13:06.402Z" },
{ url = "https://files.pythonhosted.org/packages/26/f2/ad51331a157c7015c675702e2d5230c243695c788f8f75feba1af32b3617/greenlet-3.2.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2c724620a101f8170065d7dded3f962a2aea7a7dae133a009cada42847e04a7b", size = 608402, upload-time = "2025-06-05T16:12:51.91Z" },
{ url = "https://files.pythonhosted.org/packages/26/bc/862bd2083e6b3aff23300900a956f4ea9a4059de337f5c8734346b9b34fc/greenlet-3.2.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:873abe55f134c48e1f2a6f53f7d1419192a3d1a4e873bace00499a4e45ea6af0", size = 1119577, upload-time = "2025-06-05T16:36:49.787Z" },
@@ -1429,12 +1410,23 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d8/ca/accd7aa5280eb92b70ed9e8f7fd79dc50a2c21d8c73b9a0856f5b564e222/greenlet-3.2.3-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:3d04332dddb10b4a211b68111dabaee2e1a073663d117dc10247b5b1642bac86", size = 271479, upload-time = "2025-06-05T16:10:47.525Z" },
{ url = "https://files.pythonhosted.org/packages/55/71/01ed9895d9eb49223280ecc98a557585edfa56b3d0e965b9fa9f7f06b6d9/greenlet-3.2.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8186162dffde068a465deab08fc72c767196895c39db26ab1c17c0b77a6d8b97", size = 683952, upload-time = "2025-06-05T16:38:55.125Z" },
{ url = "https://files.pythonhosted.org/packages/ea/61/638c4bdf460c3c678a0a1ef4c200f347dff80719597e53b5edb2fb27ab54/greenlet-3.2.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f4bfbaa6096b1b7a200024784217defedf46a07c2eee1a498e94a1b5f8ec5728", size = 696917, upload-time = "2025-06-05T16:41:38.959Z" },
- { url = "https://files.pythonhosted.org/packages/22/cc/0bd1a7eb759d1f3e3cc2d1bc0f0b487ad3cc9f34d74da4b80f226fde4ec3/greenlet-3.2.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:ed6cfa9200484d234d8394c70f5492f144b20d4533f69262d530a1a082f6ee9a", size = 692443, upload-time = "2025-06-05T16:48:23.113Z" },
{ url = "https://files.pythonhosted.org/packages/67/10/b2a4b63d3f08362662e89c103f7fe28894a51ae0bc890fabf37d1d780e52/greenlet-3.2.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:02b0df6f63cd15012bed5401b47829cfd2e97052dc89da3cfaf2c779124eb892", size = 692995, upload-time = "2025-06-05T16:13:07.972Z" },
{ url = "https://files.pythonhosted.org/packages/5a/c6/ad82f148a4e3ce9564056453a71529732baf5448ad53fc323e37efe34f66/greenlet-3.2.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:86c2d68e87107c1792e2e8d5399acec2487a4e993ab76c792408e59394d52141", size = 655320, upload-time = "2025-06-05T16:12:53.453Z" },
{ url = "https://files.pythonhosted.org/packages/5c/4f/aab73ecaa6b3086a4c89863d94cf26fa84cbff63f52ce9bc4342b3087a06/greenlet-3.2.3-cp314-cp314-win_amd64.whl", hash = "sha256:8c47aae8fbbfcf82cc13327ae802ba13c9c36753b67e760023fd116bc124a62a", size = 301236, upload-time = "2025-06-05T16:15:20.111Z" },
]
+[[package]]
+name = "griffe"
+version = "1.15.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "colorama" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/0d/0c/3a471b6e31951dce2360477420d0a8d1e00dea6cf33b70f3e8c3ab6e28e1/griffe-1.15.0.tar.gz", hash = "sha256:7726e3afd6f298fbc3696e67958803e7ac843c1cfe59734b6251a40cdbfb5eea", size = 424112, upload-time = "2025-11-10T15:03:15.52Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/9c/83/3b1d03d36f224edded98e9affd0467630fc09d766c0e56fb1498cbb04a9b/griffe-1.15.0-py3-none-any.whl", hash = "sha256:6f6762661949411031f5fcda9593f586e6ce8340f0ba88921a0f2ef7a81eb9a3", size = 150705, upload-time = "2025-11-10T15:03:13.549Z" },
+]
+
[[package]]
name = "grpc-google-iam-v1"
version = "0.14.2"
@@ -1652,6 +1644,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/f0/0f/310fb31e39e2d734ccaa2c0fb981ee41f7bd5056ce9bc29b2248bd569169/humanfriendly-10.0-py2.py3-none-any.whl", hash = "sha256:1697e1a8a8f550fd43c2865cd84542fc175a61dcb779b6fee18cf6b6ccba1477", size = 86794, upload-time = "2021-09-17T21:40:39.897Z" },
]
+[[package]]
+name = "identify"
+version = "2.6.15"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ff/e7/685de97986c916a6d93b3876139e00eef26ad5bbbd61925d670ae8013449/identify-2.6.15.tar.gz", hash = "sha256:e4f4864b96c6557ef2a1e1c951771838f4edc9df3a72ec7118b338801b11c7bf", size = 99311, upload-time = "2025-10-02T17:43:40.631Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/0f/1c/e5fd8f973d4f375adb21565739498e2e9a1e54c858a97b9a8ccfdc81da9b/identify-2.6.15-py2.py3-none-any.whl", hash = "sha256:1181ef7608e00704db228516541eb83a88a9f94433a8c80bb9b5bd54b1d81757", size = 99183, upload-time = "2025-10-02T17:43:39.137Z" },
+]
+
[[package]]
name = "idna"
version = "3.10"
@@ -1693,7 +1694,7 @@ wheels = [
[[package]]
name = "instructor"
-version = "1.11.3"
+version = "1.13.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "aiohttp" },
@@ -1702,61 +1703,18 @@ dependencies = [
{ name = "jinja2" },
{ name = "jiter" },
{ name = "openai" },
+ { name = "pre-commit" },
{ name = "pydantic" },
{ name = "pydantic-core" },
{ name = "requests" },
{ name = "rich" },
{ name = "tenacity" },
+ { name = "ty" },
{ name = "typer" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/6a/af/428b5d7a6a6eca5738c51706795a395099c141779cd1bbb9a6e2b0d3a94d/instructor-1.11.3.tar.gz", hash = "sha256:6f58fea6fadfa228c411ecdedad4662230c456718f4a770a97a806dcb36b3287", size = 69879936, upload-time = "2025-09-09T15:44:31.548Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/31/f0/7f31609ec2fb84b140ff573abf1cce78cd3a2a3c6479b60aa82b69d40d2a/instructor-1.13.0.tar.gz", hash = "sha256:bf838a5c503fafdd034a9b1f8544c5e1f62462eea9f89932bc75c116ad35ab5a", size = 69898121, upload-time = "2025-11-06T04:19:31.034Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/4c/5f/54783e5b1a497de204a0a59b5e22549f67f5f1aceaa08e00db21b1107ce4/instructor-1.11.3-py3-none-any.whl", hash = "sha256:9ecd7a3780a045506165debad2ddcc4a30e1057f06997973185f356b0a42c6e3", size = 155501, upload-time = "2025-09-09T15:44:26.139Z" },
-]
-
-[[package]]
-name = "ipython"
-version = "9.6.0"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "colorama", marker = "sys_platform == 'win32'" },
- { name = "decorator" },
- { name = "ipython-pygments-lexers" },
- { name = "jedi" },
- { name = "matplotlib-inline" },
- { name = "pexpect", marker = "sys_platform != 'emscripten' and sys_platform != 'win32'" },
- { name = "prompt-toolkit" },
- { name = "pygments" },
- { name = "stack-data" },
- { name = "traitlets" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/2a/34/29b18c62e39ee2f7a6a3bba7efd952729d8aadd45ca17efc34453b717665/ipython-9.6.0.tar.gz", hash = "sha256:5603d6d5d356378be5043e69441a072b50a5b33b4503428c77b04cb8ce7bc731", size = 4396932, upload-time = "2025-09-29T10:55:53.948Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/48/c5/d5e07995077e48220269c28a221e168c91123ad5ceee44d548f54a057fc0/ipython-9.6.0-py3-none-any.whl", hash = "sha256:5f77efafc886d2f023442479b8149e7d86547ad0a979e9da9f045d252f648196", size = 616170, upload-time = "2025-09-29T10:55:47.676Z" },
-]
-
-[[package]]
-name = "ipython-pygments-lexers"
-version = "1.1.1"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "pygments" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/ef/4c/5dd1d8af08107f88c7f741ead7a40854b8ac24ddf9ae850afbcf698aa552/ipython_pygments_lexers-1.1.1.tar.gz", hash = "sha256:09c0138009e56b6854f9535736f4171d855c8c08a563a0dcd8022f78355c7e81", size = 8393, upload-time = "2025-01-17T11:24:34.505Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/d9/33/1f075bf72b0b747cb3288d011319aaf64083cf2efef8354174e3ed4540e2/ipython_pygments_lexers-1.1.1-py3-none-any.whl", hash = "sha256:a9462224a505ade19a605f71f8fa63c2048833ce50abc86768a0d81d876dc81c", size = 8074, upload-time = "2025-01-17T11:24:33.271Z" },
-]
-
-[[package]]
-name = "jedi"
-version = "0.19.2"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "parso" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/72/3a/79a912fbd4d8dd6fbb02bf69afd3bb72cf0c729bb3063c6f4498603db17a/jedi-0.19.2.tar.gz", hash = "sha256:4770dc3de41bde3966b02eb84fbcf557fb33cce26ad23da12c742fb50ecb11f0", size = 1231287, upload-time = "2024-11-11T01:41:42.873Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/c0/5a/9cac0c82afec3d09ccd97c8b6502d48f165f9124db81b4bcb90b4af974ee/jedi-0.19.2-py2.py3-none-any.whl", hash = "sha256:a8ef22bde8490f57fe5c7681a3c83cb58874daf72b4784de3cce5b6ef6edb5b9", size = 1572278, upload-time = "2024-11-11T01:41:40.175Z" },
+ { url = "https://files.pythonhosted.org/packages/95/64/6542ac826a4c9b937b67c096a785af1aaa26b22fcb7c81223cfe4038205b/instructor-1.13.0-py3-none-any.whl", hash = "sha256:2b735b6ea0d3194548369a18254f1dde83cb5ec0b182de77adbadd8be73caddc", size = 160904, upload-time = "2025-11-06T04:19:24.674Z" },
]
[[package]]
@@ -1837,15 +1795,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/73/07/02e16ed01e04a374e644b575638ec7987ae846d25ad97bcc9945a3ee4b0e/jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade", size = 12898, upload-time = "2023-06-16T21:01:28.466Z" },
]
-[[package]]
-name = "jsonpickle"
-version = "4.1.1"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/e4/a6/d07afcfdef402900229bcca795f80506b207af13a838d4d99ad45abf530c/jsonpickle-4.1.1.tar.gz", hash = "sha256:f86e18f13e2b96c1c1eede0b7b90095bbb61d99fedc14813c44dc2f361dbbae1", size = 316885, upload-time = "2025-06-02T20:36:11.57Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/c1/73/04df8a6fa66d43a9fd45c30f283cc4afff17da671886e451d52af60bdc7e/jsonpickle-4.1.1-py3-none-any.whl", hash = "sha256:bb141da6057898aa2438ff268362b126826c812a1721e31cf08a6e142910dc91", size = 47125, upload-time = "2025-06-02T20:36:08.647Z" },
-]
-
[[package]]
name = "jsonpointer"
version = "3.0.0"
@@ -1909,6 +1858,7 @@ dependencies = [
{ name = "httpx" },
{ name = "jsonref" },
{ name = "kagent-core" },
+ { name = "kagent-skills" },
{ name = "litellm" },
{ name = "mcp" },
{ name = "openai" },
@@ -1943,6 +1893,7 @@ requires-dist = [
{ name = "httpx", specifier = ">=0.25.0" },
{ name = "jsonref", specifier = ">=1.1.0" },
{ name = "kagent-core", editable = "packages/kagent-core" },
+ { name = "kagent-skills", editable = "packages/kagent-skills" },
{ name = "litellm", specifier = ">=1.74.3" },
{ name = "mcp", specifier = ">=1.12.0" },
{ name = "openai", specifier = ">=1.72.0" },
@@ -2015,7 +1966,7 @@ dev = [
requires-dist = [
{ name = "a2a-sdk", extras = ["http-server"], specifier = ">=0.3.1" },
{ name = "black", marker = "extra == 'dev'", specifier = ">=23.0.0" },
- { name = "crewai", extras = ["tools"], specifier = ">=0.193.2,<1.0.0" },
+ { name = "crewai", extras = ["tools"], specifier = ">=1.2.0" },
{ name = "fastapi", specifier = ">=0.100.0" },
{ name = "google-genai", specifier = ">=1.21.1" },
{ name = "httpx", specifier = ">=0.25.0" },
@@ -2074,6 +2025,78 @@ requires-dist = [
]
provides-extras = ["dev"]
+[[package]]
+name = "kagent-openai"
+version = "0.1.0"
+source = { editable = "packages/kagent-openai" }
+dependencies = [
+ { name = "a2a-sdk" },
+ { name = "fastapi" },
+ { name = "httpx" },
+ { name = "kagent-core" },
+ { name = "kagent-skills" },
+ { name = "openai" },
+ { name = "openai-agents" },
+ { name = "opentelemetry-instrumentation-openai-agents" },
+ { name = "pydantic" },
+ { name = "uvicorn" },
+]
+
+[package.optional-dependencies]
+dev = [
+ { name = "black" },
+ { name = "pytest" },
+ { name = "pytest-asyncio" },
+ { name = "ruff" },
+]
+
+[package.metadata]
+requires-dist = [
+ { name = "a2a-sdk", specifier = ">=0.3.1" },
+ { name = "black", marker = "extra == 'dev'", specifier = ">=23.0.0" },
+ { name = "fastapi", specifier = ">=0.100.0" },
+ { name = "httpx", specifier = ">=0.25.0" },
+ { name = "kagent-core", editable = "packages/kagent-core" },
+ { name = "kagent-skills", editable = "packages/kagent-skills" },
+ { name = "openai", specifier = ">=1.72.0" },
+ { name = "openai-agents", specifier = ">=0.4.0" },
+ { name = "opentelemetry-instrumentation-openai-agents", specifier = ">=0.48.0" },
+ { name = "pydantic", specifier = ">=2.0.0" },
+ { name = "pytest", marker = "extra == 'dev'", specifier = ">=7.0.0" },
+ { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.21.0" },
+ { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.1.0" },
+ { name = "uvicorn", specifier = ">=0.20.0" },
+]
+provides-extras = ["dev"]
+
+[[package]]
+name = "kagent-skills"
+version = "0.1.0"
+source = { editable = "packages/kagent-skills" }
+dependencies = [
+ { name = "pydantic" },
+ { name = "pyyaml" },
+]
+
+[package.optional-dependencies]
+dev = [
+ { name = "black" },
+ { name = "pytest" },
+ { name = "pytest-asyncio" },
+ { name = "ruff" },
+]
+
+[package.metadata]
+requires-dist = [
+ { name = "black", marker = "extra == 'dev'", specifier = ">=23.0.0" },
+ { name = "pydantic", specifier = ">=2.0.0" },
+ { name = "pytest", marker = "extra == 'dev'", specifier = ">=7.0.0" },
+ { name = "pytest-asyncio", marker = "extra == 'dev'", specifier = ">=0.21.0" },
+ { name = "pyyaml", specifier = ">=6.0" },
+ { name = "ruff", marker = "extra == 'dev'", specifier = ">=0.1.0" },
+]
+provides-extras = ["dev"]
+
[[package]]
name = "kubernetes"
version = "33.1.0"
@@ -2414,21 +2437,9 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" },
]
-[[package]]
-name = "matplotlib-inline"
-version = "0.1.7"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "traitlets" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/99/5b/a36a337438a14116b16480db471ad061c36c3694df7c2084a0da7ba538b7/matplotlib_inline-0.1.7.tar.gz", hash = "sha256:8423b23ec666be3d16e16b60bdd8ac4e86e840ebd1dd11a30b9f117f2fa0ab90", size = 8159, upload-time = "2024-04-15T13:44:44.803Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/8f/8e/9ad090d3553c280a8060fbf6e24dc1c0c29704ee7d1c372f0c174aa59285/matplotlib_inline-0.1.7-py3-none-any.whl", hash = "sha256:df192d39a4ff8f21b1895d72e6a13f5fcc5099f00fa84384e0ea28c2cc0653ca", size = 9899, upload-time = "2024-04-15T13:44:43.265Z" },
-]
-
[[package]]
name = "mcp"
-version = "1.12.3"
+version = "1.25.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
@@ -2437,15 +2448,18 @@ dependencies = [
{ name = "jsonschema" },
{ name = "pydantic" },
{ name = "pydantic-settings" },
+ { name = "pyjwt", extra = ["crypto"] },
{ name = "python-multipart" },
{ name = "pywin32", marker = "sys_platform == 'win32'" },
{ name = "sse-starlette" },
{ name = "starlette" },
+ { name = "typing-extensions" },
+ { name = "typing-inspection" },
{ name = "uvicorn", marker = "sys_platform != 'emscripten'" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/4d/19/9955e2df5384ff5dd25d38f8e88aaf89d2d3d9d39f27e7383eaf0b293836/mcp-1.12.3.tar.gz", hash = "sha256:ab2e05f5e5c13e1dc90a4a9ef23ac500a6121362a564447855ef0ab643a99fed", size = 427203, upload-time = "2025-07-31T18:36:36.795Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/d5/2d/649d80a0ecf6a1f82632ca44bec21c0461a9d9fc8934d38cb5b319f2db5e/mcp-1.25.0.tar.gz", hash = "sha256:56310361ebf0364e2d438e5b45f7668cbb124e158bb358333cd06e49e83a6802", size = 605387, upload-time = "2025-12-19T10:19:56.985Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/8f/8b/0be74e3308a486f1d127f3f6767de5f9f76454c9b4183210c61cc50999b6/mcp-1.12.3-py3-none-any.whl", hash = "sha256:5483345bf39033b858920a5b6348a303acacf45b23936972160ff152107b850e", size = 158810, upload-time = "2025-07-31T18:36:34.915Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/fc/6dc7659c2ae5ddf280477011f4213a74f806862856b796ef08f028e664bf/mcp-1.25.0-py3-none-any.whl", hash = "sha256:b37c38144a666add0862614cc79ec276e97d72aa8ca26d622818d4e278b9721a", size = 233076, upload-time = "2025-12-19T10:19:55.416Z" },
]
[[package]]
@@ -2585,21 +2599,12 @@ wheels = [
]
[[package]]
-name = "nest-asyncio"
-version = "1.6.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/83/f8/51569ac65d696c8ecbee95938f89d4abf00f47d58d48f6fbabfe8f0baefe/nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe", size = 7418, upload-time = "2024-01-21T14:25:19.227Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/a0/c4/c2971a3ba4c6103a3d10c4b0f24f461ddc027f0f09763220cf35ca1401b3/nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c", size = 5195, upload-time = "2024-01-21T14:25:17.223Z" },
-]
-
-[[package]]
-name = "networkx"
-version = "3.5"
+name = "nodeenv"
+version = "1.10.0"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/6c/4f/ccdb8ad3a38e583f214547fd2f7ff1fc160c43a75af88e6aec213404b96a/networkx-3.5.tar.gz", hash = "sha256:d4c6f9cf81f52d69230866796b82afbccdec3db7ae4fbd1b65ea750feed50037", size = 2471065, upload-time = "2025-05-29T11:35:07.804Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/24/bf/d1bda4f6168e0b2e9e5958945e01910052158313224ada5ce1fb2e1113b8/nodeenv-1.10.0.tar.gz", hash = "sha256:996c191ad80897d076bdfba80a41994c2b47c68e224c542b48feba42ba00f8bb", size = 55611, upload-time = "2025-12-20T14:08:54.006Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/eb/8d/776adee7bbf76365fdd7f2552710282c79a4ead5d2a46408c9043a2b70ba/networkx-3.5-py3-none-any.whl", hash = "sha256:0030d386a9a06dee3565298b4a734b68589749a544acbb6c412dc9e2489ec6ec", size = 2034406, upload-time = "2025-05-29T11:35:04.961Z" },
+ { url = "https://files.pythonhosted.org/packages/88/b2/d0896bdcdc8d28a7fc5717c305f1a861c26e18c05047949fb371034d98bd/nodeenv-1.10.0-py2.py3-none-any.whl", hash = "sha256:5bb13e3eed2923615535339b3c620e76779af4cb4c6a90deccc9e36b274d3827", size = 23438, upload-time = "2025-12-20T14:08:52.782Z" },
]
[[package]]
@@ -2687,7 +2692,7 @@ wheels = [
[[package]]
name = "openai"
-version = "1.99.1"
+version = "2.14.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
@@ -2699,9 +2704,27 @@ dependencies = [
{ name = "tqdm" },
{ name = "typing-extensions" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/03/30/f0fb7907a77e733bb801c7bdcde903500b31215141cdb261f04421e6fbec/openai-1.99.1.tar.gz", hash = "sha256:2c9d8e498c298f51bb94bcac724257a3a6cac6139ccdfc1186c6708f7a93120f", size = 497075, upload-time = "2025-08-05T19:42:36.131Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/d8/b1/12fe1c196bea326261718eb037307c1c1fe1dedc2d2d4de777df822e6238/openai-2.14.0.tar.gz", hash = "sha256:419357bedde9402d23bf8f2ee372fca1985a73348debba94bddff06f19459952", size = 626938, upload-time = "2025-12-19T03:28:45.742Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/27/4b/7c1a00c2c3fbd004253937f7520f692a9650767aa73894d7a34f0d65d3f4/openai-2.14.0-py3-none-any.whl", hash = "sha256:7ea40aca4ffc4c4a776e77679021b47eec1160e341f42ae086ba949c9dcc9183", size = 1067558, upload-time = "2025-12-19T03:28:43.727Z" },
+]
+
+[[package]]
+name = "openai-agents"
+version = "0.6.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "griffe" },
+ { name = "mcp" },
+ { name = "openai" },
+ { name = "pydantic" },
+ { name = "requests" },
+ { name = "types-requests" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/e7/5c/5ebface62a0efdc7298152dcd2d32164403e25e53f1088c042936d8d40f9/openai_agents-0.6.5.tar.gz", hash = "sha256:67e8cab27082d1a1fe6f3fecfcf89b41ff249988a75640bbcc2764952d603ef0", size = 2044506, upload-time = "2026-01-06T15:32:50.936Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/54/15/9c85154ffd283abfc43309ff3aaa63c3fd02f7767ee684e73670f6c5ade2/openai-1.99.1-py3-none-any.whl", hash = "sha256:8eeccc69e0ece1357b51ca0d9fb21324afee09b20c3e5b547d02445ca18a4e03", size = 767827, upload-time = "2025-08-05T19:42:34.192Z" },
+ { url = "https://files.pythonhosted.org/packages/17/db/16020e45d53366f2ed653ce0ddf959a647687d47180954de7654a133b910/openai_agents-0.6.5-py3-none-any.whl", hash = "sha256:c81d2eaa5c4563b8e893ba836fe170cf10ba974420ff283b4f001f84e7cb6e6b", size = 249352, upload-time = "2026-01-06T15:32:48.847Z" },
]
[[package]]
@@ -2947,6 +2970,21 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/62/10/59ecc2bde0915911ec4a39f2af06e7fc0c8c0356d822d263e975578a087c/opentelemetry_instrumentation_openai-0.47.3-py3-none-any.whl", hash = "sha256:c16f8ae05cd583b373f1fe3d34a724d020c18310ce431ac4ad51fd23d8279293", size = 35271, upload-time = "2025-09-21T12:12:25.439Z" },
]
+[[package]]
+name = "opentelemetry-instrumentation-openai-agents"
+version = "0.48.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "opentelemetry-api" },
+ { name = "opentelemetry-instrumentation" },
+ { name = "opentelemetry-semantic-conventions" },
+ { name = "opentelemetry-semantic-conventions-ai" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/7a/28/a97989398cf39cf6e66bc3cb1e35409bb2476fe2fe28716eb4829644d8d6/opentelemetry_instrumentation_openai_agents-0.48.1.tar.gz", hash = "sha256:bfde836e7be87d007f56466162d3af82d7df87ef5be3e3b04f5311635ef68e75", size = 8838, upload-time = "2025-11-17T15:26:46.975Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/72/31/d78e7939e234a8bb2e8fb6bf903c6e52e13f5683cac4dee9a9657a359f9a/opentelemetry_instrumentation_openai_agents-0.48.1-py3-none-any.whl", hash = "sha256:399eae23171ffba816a92c5045846737a8094d0f21d1de7d09051260302a9a82", size = 9847, upload-time = "2025-11-17T15:26:17.698Z" },
+]
+
[[package]]
name = "opentelemetry-proto"
version = "1.37.0"
@@ -3087,15 +3125,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
]
-[[package]]
-name = "parso"
-version = "0.8.5"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/d4/de/53e0bcf53d13e005bd8c92e7855142494f41171b34c2536b86187474184d/parso-0.8.5.tar.gz", hash = "sha256:034d7354a9a018bdce352f48b2a8a450f05e9d6ee85db84764e9b6bd96dafe5a", size = 401205, upload-time = "2025-08-23T15:15:28.028Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/16/32/f8e3c85d1d5250232a5d3477a2a28cc291968ff175caeadaf3cc19ce0e4a/parso-0.8.5-py2.py3-none-any.whl", hash = "sha256:646204b5ee239c396d040b90f9e272e9a8017c630092bf59980beb62fd033887", size = 106668, upload-time = "2025-08-23T15:15:25.663Z" },
-]
-
[[package]]
name = "pathspec"
version = "0.12.1"
@@ -3132,18 +3161,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/db/e0/52b67d4f00e09e497aec4f71bc44d395605e8ebcea52543242ed34c25ef9/pdfplumber-0.11.7-py3-none-any.whl", hash = "sha256:edd2195cca68bd770da479cf528a737e362968ec2351e62a6c0b71ff612ac25e", size = 60029, upload-time = "2025-06-12T11:30:48.89Z" },
]
-[[package]]
-name = "pexpect"
-version = "4.9.0"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "ptyprocess" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/42/92/cc564bf6381ff43ce1f4d06852fc19a2f11d180f23dc32d9588bee2f149d/pexpect-4.9.0.tar.gz", hash = "sha256:ee7d41123f3c9911050ea2c2dac107568dc43b2d3b0c7557a33212c398ead30f", size = 166450, upload-time = "2023-11-25T09:07:26.339Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/9e/c3/059298687310d527a58bb01f3b1965787ee3b40dce76752eda8b44e9a2c5/pexpect-4.9.0-py2.py3-none-any.whl", hash = "sha256:7236d1e080e4936be2dc3e326cec0af72acf9212a7e1d060210e70a47e253523", size = 63772, upload-time = "2023-11-25T06:56:14.81Z" },
-]
-
[[package]]
name = "pillow"
version = "11.3.0"
@@ -3208,25 +3225,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4", size = 18567, upload-time = "2025-05-07T22:47:40.376Z" },
]
-[[package]]
-name = "playwright"
-version = "1.55.0"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "greenlet" },
- { name = "pyee" },
-]
-wheels = [
- { url = "https://files.pythonhosted.org/packages/80/3a/c81ff76df266c62e24f19718df9c168f49af93cabdbc4608ae29656a9986/playwright-1.55.0-py3-none-macosx_10_13_x86_64.whl", hash = "sha256:d7da108a95001e412effca4f7610de79da1637ccdf670b1ae3fdc08b9694c034", size = 40428109, upload-time = "2025-08-28T15:46:20.357Z" },
- { url = "https://files.pythonhosted.org/packages/cf/f5/bdb61553b20e907196a38d864602a9b4a461660c3a111c67a35179b636fa/playwright-1.55.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:8290cf27a5d542e2682ac274da423941f879d07b001f6575a5a3a257b1d4ba1c", size = 38687254, upload-time = "2025-08-28T15:46:23.925Z" },
- { url = "https://files.pythonhosted.org/packages/4a/64/48b2837ef396487807e5ab53c76465747e34c7143fac4a084ef349c293a8/playwright-1.55.0-py3-none-macosx_11_0_universal2.whl", hash = "sha256:25b0d6b3fd991c315cca33c802cf617d52980108ab8431e3e1d37b5de755c10e", size = 40428108, upload-time = "2025-08-28T15:46:27.119Z" },
- { url = "https://files.pythonhosted.org/packages/08/33/858312628aa16a6de97839adc2ca28031ebc5391f96b6fb8fdf1fcb15d6c/playwright-1.55.0-py3-none-manylinux1_x86_64.whl", hash = "sha256:c6d4d8f6f8c66c483b0835569c7f0caa03230820af8e500c181c93509c92d831", size = 45905643, upload-time = "2025-08-28T15:46:30.312Z" },
- { url = "https://files.pythonhosted.org/packages/83/83/b8d06a5b5721931aa6d5916b83168e28bd891f38ff56fe92af7bdee9860f/playwright-1.55.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29a0777c4ce1273acf90c87e4ae2fe0130182100d99bcd2ae5bf486093044838", size = 45296647, upload-time = "2025-08-28T15:46:33.221Z" },
- { url = "https://files.pythonhosted.org/packages/06/2e/9db64518aebcb3d6ef6cd6d4d01da741aff912c3f0314dadb61226c6a96a/playwright-1.55.0-py3-none-win32.whl", hash = "sha256:29e6d1558ad9d5b5c19cbec0a72f6a2e35e6353cd9f262e22148685b86759f90", size = 35476046, upload-time = "2025-08-28T15:46:36.184Z" },
- { url = "https://files.pythonhosted.org/packages/46/4f/9ba607fa94bb9cee3d4beb1c7b32c16efbfc9d69d5037fa85d10cafc618b/playwright-1.55.0-py3-none-win_amd64.whl", hash = "sha256:7eb5956473ca1951abb51537e6a0da55257bb2e25fc37c2b75af094a5c93736c", size = 35476048, upload-time = "2025-08-28T15:46:38.867Z" },
- { url = "https://files.pythonhosted.org/packages/21/98/5ca173c8ec906abde26c28e1ecb34887343fd71cc4136261b90036841323/playwright-1.55.0-py3-none-win_arm64.whl", hash = "sha256:012dc89ccdcbd774cdde8aeee14c08e0dd52ddb9135bf10e9db040527386bd76", size = 31225543, upload-time = "2025-08-28T15:46:41.613Z" },
-]
-
[[package]]
name = "pluggy"
version = "1.6.0"
@@ -3241,15 +3239,11 @@ name = "poem-flow"
version = "0.1.0"
source = { editable = "samples/crewai/poem_flow" }
dependencies = [
- { name = "crewai", extra = ["tools"] },
{ name = "kagent-crewai" },
]
[package.metadata]
-requires-dist = [
- { name = "crewai", extras = ["tools"], specifier = ">=0.193.2,<1.0.0" },
- { name = "kagent-crewai", editable = "packages/kagent-crewai" },
-]
+requires-dist = [{ name = "kagent-crewai", editable = "packages/kagent-crewai" }]
[[package]]
name = "portalocker"
@@ -3280,15 +3274,19 @@ wheels = [
]
[[package]]
-name = "prompt-toolkit"
-version = "3.0.52"
+name = "pre-commit"
+version = "4.5.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
- { name = "wcwidth" },
+ { name = "cfgv" },
+ { name = "identify" },
+ { name = "nodeenv" },
+ { name = "pyyaml" },
+ { name = "virtualenv" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/a1/96/06e01a7b38dce6fe1db213e061a4602dd6032a8a97ef6c1a862537732421/prompt_toolkit-3.0.52.tar.gz", hash = "sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855", size = 434198, upload-time = "2025-08-27T15:24:02.057Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/40/f1/6d86a29246dfd2e9b6237f0b5823717f60cad94d47ddc26afa916d21f525/pre_commit-4.5.1.tar.gz", hash = "sha256:eb545fcff725875197837263e977ea257a402056661f09dae08e4b149b030a61", size = 198232, upload-time = "2025-12-16T21:14:33.552Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955", size = 391431, upload-time = "2025-08-27T15:23:59.498Z" },
+ { url = "https://files.pythonhosted.org/packages/5d/19/fd3ef348460c80af7bb4669ea7926651d1f95c23ff2df18b9d24bab4f3fa/pre_commit-4.5.1-py2.py3-none-any.whl", hash = "sha256:3b3afd891e97337708c1674210f8eba659b52a38ea5f822ff142d10786221f77", size = 226437, upload-time = "2025-12-16T21:14:32.409Z" },
]
[[package]]
@@ -3373,24 +3371,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/50/1b/6921afe68c74868b4c9fa424dad3be35b095e16687989ebbb50ce4fceb7c/psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553", size = 244885, upload-time = "2025-02-13T21:54:37.486Z" },
]
-[[package]]
-name = "ptyprocess"
-version = "0.7.0"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220", size = 70762, upload-time = "2020-12-28T15:15:30.155Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993, upload-time = "2020-12-28T15:15:28.35Z" },
-]
-
-[[package]]
-name = "pure-eval"
-version = "0.2.3"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/cd/05/0a34433a064256a578f1783a10da6df098ceaa4a57bbeaa96a6c0352786b/pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42", size = 19752, upload-time = "2024-07-21T12:58:21.801Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842, upload-time = "2024-07-21T12:58:20.04Z" },
-]
-
[[package]]
name = "pyarrow"
version = "21.0.0"
@@ -3537,7 +3517,7 @@ wheels = [
[[package]]
name = "pydantic"
-version = "2.11.9"
+version = "2.12.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "annotated-types" },
@@ -3545,37 +3525,62 @@ dependencies = [
{ name = "typing-extensions" },
{ name = "typing-inspection" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/ff/5d/09a551ba512d7ca404d785072700d3f6727a02f6f3c24ecfd081c7cf0aa8/pydantic-2.11.9.tar.gz", hash = "sha256:6b8ffda597a14812a7975c90b82a8a2e777d9257aba3453f973acd3c032a18e2", size = 788495, upload-time = "2025-09-13T11:26:39.325Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/3e/d3/108f2006987c58e76691d5ae5d200dd3e0f532cb4e5fa3560751c3a1feba/pydantic-2.11.9-py3-none-any.whl", hash = "sha256:c42dd626f5cfc1c6950ce6205ea58c93efa406da65f479dcb4029d5934857da2", size = 444855, upload-time = "2025-09-13T11:26:36.909Z" },
+ { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" },
]
[[package]]
name = "pydantic-core"
-version = "2.33.2"
+version = "2.41.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" },
- { url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" },
- { url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" },
- { url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" },
- { url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" },
- { url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" },
- { url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" },
- { url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" },
- { url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" },
- { url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" },
- { url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" },
- { url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" },
- { url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" },
- { url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" },
- { url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" },
- { url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" },
- { url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" },
+sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" },
+ { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" },
+ { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" },
+ { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" },
+ { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" },
+ { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" },
+ { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = "2025-11-04T13:40:40.289Z" },
+ { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" },
+ { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" },
+ { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" },
+ { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" },
+ { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" },
+ { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" },
+ { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" },
+ { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" },
+ { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" },
+ { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" },
+ { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" },
+ { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" },
+ { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" },
+ { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" },
+ { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" },
+ { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" },
+ { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" },
+ { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" },
+ { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" },
+ { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" },
+ { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" },
+ { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" },
+ { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" },
+ { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" },
+ { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" },
+ { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" },
+ { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" },
+ { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" },
+ { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = "2025-11-04T13:41:47.474Z" },
+ { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" },
+ { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" },
+ { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" },
+ { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" },
]
[[package]]
@@ -3592,18 +3597,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/58/f0/427018098906416f580e3cf1366d3b1abfb408a0652e9f31600c24a1903c/pydantic_settings-2.10.1-py3-none-any.whl", hash = "sha256:a60952460b99cf661dc25c29c0ef171721f98bfcb52ef8d9ea4c943d7c8cc796", size = 45235, upload-time = "2025-06-24T13:26:45.485Z" },
]
-[[package]]
-name = "pyee"
-version = "13.0.0"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "typing-extensions" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/95/03/1fd98d5841cd7964a27d729ccf2199602fe05eb7a405c1462eb7277945ed/pyee-13.0.0.tar.gz", hash = "sha256:b391e3c5a434d1f5118a25615001dbc8f669cf410ab67d04c4d4e07c55481c37", size = 31250, upload-time = "2025-03-17T18:53:15.955Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/9b/4d/b9add7c84060d4c1906abe9a7e5359f2a60f7a9a4f67268b2766673427d8/pyee-13.0.0-py3-none-any.whl", hash = "sha256:48195a3cddb3b1515ce0695ed76036b5ccc2ef3a9f963ff9f77aec0139845498", size = 15730, upload-time = "2025-03-17T18:53:14.532Z" },
-]
-
[[package]]
name = "pygments"
version = "2.19.2"
@@ -3622,6 +3615,11 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" },
]
+[package.optional-dependencies]
+crypto = [
+ { name = "cryptography" },
+]
+
[[package]]
name = "pylance"
version = "0.38.0"
@@ -3641,21 +3639,27 @@ wheels = [
]
[[package]]
-name = "pyparsing"
-version = "3.2.3"
+name = "pymupdf"
+version = "1.26.7"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/bb/22/f1129e69d94ffff626bdb5c835506b3a5b4f3d070f17ea295e12c2c6f60f/pyparsing-3.2.3.tar.gz", hash = "sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be", size = 1088608, upload-time = "2025-03-25T05:01:28.114Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/48/d6/09b28f027b510838559f7748807192149c419b30cb90e6d5f0cf916dc9dc/pymupdf-1.26.7.tar.gz", hash = "sha256:71add8bdc8eb1aaa207c69a13400693f06ad9b927bea976f5d5ab9df0bb489c3", size = 84327033, upload-time = "2025-12-11T21:48:50.694Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/05/e7/df2285f3d08fee213f2d041540fa4fc9ca6c2d44cf36d3a035bf2a8d2bcc/pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf", size = 111120, upload-time = "2025-03-25T05:01:24.908Z" },
+ { url = "https://files.pythonhosted.org/packages/94/35/cd74cea1787b2247702ef8522186bdef32e9cb30a099e6bb864627ef6045/pymupdf-1.26.7-cp310-abi3-macosx_10_9_x86_64.whl", hash = "sha256:07085718dfdae5ab83b05eb5eb397f863bcc538fe05135318a01ea353e7a1353", size = 23179369, upload-time = "2025-12-11T21:47:21.587Z" },
+ { url = "https://files.pythonhosted.org/packages/72/74/448b6172927c829c6a3fba80078d7b0a016ebbe2c9ee528821f5ea21677a/pymupdf-1.26.7-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:31aa9c8377ea1eea02934b92f4dcf79fb2abba0bf41f8a46d64c3e31546a3c02", size = 22470101, upload-time = "2025-12-11T21:47:37.105Z" },
+ { url = "https://files.pythonhosted.org/packages/65/e7/47af26f3ac76be7ac3dd4d6cc7ee105948a8355d774e5ca39857bf91c11c/pymupdf-1.26.7-cp310-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:e419b609996434a14a80fa060adec72c434a1cca6a511ec54db9841bc5d51b3c", size = 23502486, upload-time = "2025-12-12T09:51:25.824Z" },
+ { url = "https://files.pythonhosted.org/packages/2a/6b/3de1714d734ff949be1e90a22375d0598d3540b22ae73eb85c2d7d1f36a9/pymupdf-1.26.7-cp310-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:69dfc78f206a96e5b3ac22741263ebab945fdf51f0dbe7c5757c3511b23d9d72", size = 24115727, upload-time = "2025-12-11T21:47:51.274Z" },
+ { url = "https://files.pythonhosted.org/packages/62/9b/f86224847949577a523be2207315ae0fd3155b5d909cd66c274d095349a3/pymupdf-1.26.7-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:1d5106f46e1ca0d64d46bd51892372a4f82076bdc14a9678d33d630702abca36", size = 24324386, upload-time = "2025-12-12T14:58:45.483Z" },
+ { url = "https://files.pythonhosted.org/packages/85/8e/a117d39092ca645fde8b903f4a941d9aa75b370a67b4f1f435f56393dc5a/pymupdf-1.26.7-cp310-abi3-win32.whl", hash = "sha256:7c9645b6f5452629c747690190350213d3e5bbdb6b2eca227d82702b327f6eee", size = 17203888, upload-time = "2025-12-12T13:59:57.613Z" },
+ { url = "https://files.pythonhosted.org/packages/dd/c3/d0047678146c294469c33bae167c8ace337deafb736b0bf97b9bc481aa65/pymupdf-1.26.7-cp310-abi3-win_amd64.whl", hash = "sha256:425b1befe40d41b72eb0fe211711c7ae334db5eb60307e9dd09066ed060cceba", size = 18405952, upload-time = "2025-12-11T21:48:02.947Z" },
]
[[package]]
-name = "pypdf"
-version = "6.1.1"
+name = "pyparsing"
+version = "3.2.3"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/a6/85/4c0f12616db83c2e3ef580c3cfa98bd082e88fc8d02e136bad3bede1e3fa/pypdf-6.1.1.tar.gz", hash = "sha256:10f44d49bf2a82e54c3c5ba3cdcbb118f2a44fc57df8ce51d6fb9b1ed9bfbe8b", size = 5074507, upload-time = "2025-09-28T13:29:16.165Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/bb/22/f1129e69d94ffff626bdb5c835506b3a5b4f3d070f17ea295e12c2c6f60f/pyparsing-3.2.3.tar.gz", hash = "sha256:b9c13f1ab8b3b542f72e28f634bad4de758ab3ce4546e4301970ad6fa77c38be", size = 1088608, upload-time = "2025-03-25T05:01:28.114Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/07/ed/adae13756d9dabdddee483fc7712905bb5585fbf6e922b1a19aca3a29cd1/pypdf-6.1.1-py3-none-any.whl", hash = "sha256:7781f99493208a37a7d4275601d883e19af24e62a525c25844d22157c2e4cde7", size = 323455, upload-time = "2025-09-28T13:29:14.392Z" },
+ { url = "https://files.pythonhosted.org/packages/05/e7/df2285f3d08fee213f2d041540fa4fc9ca6c2d44cf36d3a035bf2a8d2bcc/pyparsing-3.2.3-py3-none-any.whl", hash = "sha256:a749938e02d6fd0b59b356ca504a24982314bb090c383e3cf201c95ef7e2bfcf", size = 111120, upload-time = "2025-03-25T05:01:24.908Z" },
]
[[package]]
@@ -3782,20 +3786,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/51/64/bcf8632ed2b7a36bbf84a0544885ffa1d0b4bcf25cc0903dba66ec5fdad9/pytube-15.0.0-py3-none-any.whl", hash = "sha256:07b9904749e213485780d7eb606e5e5b8e4341aa4dccf699160876da00e12d78", size = 57594, upload-time = "2023-05-07T19:38:59.191Z" },
]
-[[package]]
-name = "pyvis"
-version = "0.3.2"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "ipython" },
- { name = "jinja2" },
- { name = "jsonpickle" },
- { name = "networkx" },
-]
-wheels = [
- { url = "https://files.pythonhosted.org/packages/ab/4b/e37e4e5d5ee1179694917b445768bdbfb084f5a59ecd38089d3413d4c70f/pyvis-0.3.2-py3-none-any.whl", hash = "sha256:5720c4ca8161dc5d9ab352015723abb7a8bb8fb443edeb07f7a322db34a97555", size = 756038, upload-time = "2023-02-24T20:29:46.758Z" },
-]
-
[[package]]
name = "pywin32"
version = "311"
@@ -3877,7 +3867,7 @@ wheels = [
[[package]]
name = "requests"
-version = "2.32.4"
+version = "2.32.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "certifi" },
@@ -3885,9 +3875,9 @@ dependencies = [
{ name = "idna" },
{ name = "urllib3" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/e1/0a/929373653770d8a0d7ea76c37de6e41f11eb07559b103b1c02cafb3f7cf8/requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422", size = 135258, upload-time = "2025-06-09T16:43:07.34Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/7c/e4/56027c4a6b4ae70ca9de302488c5ca95ad4a39e190093d6c1a8ace08341b/requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c", size = 64847, upload-time = "2025-06-09T16:43:05.728Z" },
+ { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
]
[[package]]
@@ -3920,15 +3910,11 @@ name = "research-crew"
version = "0.1.0"
source = { editable = "samples/crewai/research-crew" }
dependencies = [
- { name = "crewai", extra = ["tools"] },
{ name = "kagent-crewai" },
]
[package.metadata]
-requires-dist = [
- { name = "crewai", extras = ["tools"], specifier = ">=0.193.2,<1.0.0" },
- { name = "kagent-crewai", editable = "packages/kagent-crewai" },
-]
+requires-dist = [{ name = "kagent-crewai", editable = "packages/kagent-crewai" }]
[[package]]
name = "rich"
@@ -4173,42 +4159,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/ef/10/c78f463b4ef22eef8491f218f692be838282cd65480f6e423d7730dfd1fb/sse_starlette-3.0.2-py3-none-any.whl", hash = "sha256:16b7cbfddbcd4eaca11f7b586f3b8a080f1afe952c15813455b162edea619e5a", size = 11297, upload-time = "2025-07-27T09:07:43.268Z" },
]
-[[package]]
-name = "stack-data"
-version = "0.6.3"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "asttokens" },
- { name = "executing" },
- { name = "pure-eval" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/28/e3/55dcc2cfbc3ca9c29519eb6884dd1415ecb53b0e934862d3559ddcb7e20b/stack_data-0.6.3.tar.gz", hash = "sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9", size = 44707, upload-time = "2023-09-30T13:58:05.479Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/f1/7b/ce1eafaf1a76852e2ec9b22edecf1daa58175c090266e9f6c64afcd81d91/stack_data-0.6.3-py3-none-any.whl", hash = "sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695", size = 24521, upload-time = "2023-09-30T13:58:03.53Z" },
-]
-
-[[package]]
-name = "stagehand"
-version = "0.5.2"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
- { name = "anthropic" },
- { name = "browserbase" },
- { name = "httpx" },
- { name = "litellm" },
- { name = "nest-asyncio" },
- { name = "openai" },
- { name = "playwright" },
- { name = "pydantic" },
- { name = "python-dotenv" },
- { name = "requests" },
- { name = "rich" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/46/54/17dd3bc699c755c9f8c6fe3da0dff1649a1a04c9f79e0693a7cb620666c8/stagehand-0.5.2.tar.gz", hash = "sha256:bee84bb541786a3328fca7a6d45d4b9f231389c84d714fbd66cef54d915b94bb", size = 95576, upload-time = "2025-08-28T20:57:29.957Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/54/7a/32a82b764030bd8afa9f0a9f012ed88620b5caf248b19e0f49950072d91d/stagehand-0.5.2-py3-none-any.whl", hash = "sha256:d0d8cd26fbc9b58be7fb5212296ee12c682675281accc247f2b0443cb03f59b8", size = 106686, upload-time = "2025-08-28T20:57:28.631Z" },
-]
-
[[package]]
name = "starlette"
version = "0.50.0"
@@ -4326,12 +4276,28 @@ wheels = [
]
[[package]]
-name = "traitlets"
-version = "5.14.3"
+name = "ty"
+version = "0.0.9"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/eb/79/72064e6a701c2183016abbbfedaba506d81e30e232a68c9f0d6f6fcd1574/traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7", size = 161621, upload-time = "2024-04-19T11:11:49.746Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/19/7b/4f677c622d58563c593c32081f8a8572afd90e43dc15b0dedd27b4305038/ty-0.0.9.tar.gz", hash = "sha256:83f980c46df17586953ab3060542915827b43c4748a59eea04190c59162957fe", size = 4858642, upload-time = "2026-01-05T12:24:56.528Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f", size = 85359, upload-time = "2024-04-19T11:11:46.763Z" },
+ { url = "https://files.pythonhosted.org/packages/3f/3f/c1ee119738b401a8081ff84341781122296b66982e5982e6f162d946a1ff/ty-0.0.9-py3-none-linux_armv6l.whl", hash = "sha256:dd270d4dd6ebeb0abb37aee96cbf9618610723677f500fec1ba58f35bfa8337d", size = 9763596, upload-time = "2026-01-05T12:24:37.43Z" },
+ { url = "https://files.pythonhosted.org/packages/63/41/6b0669ef4cd806d4bd5c30263e6b732a362278abac1bc3a363a316cde896/ty-0.0.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:debfb2ba418b00e86ffd5403cb666b3f04e16853f070439517dd1eaaeeff9255", size = 9591514, upload-time = "2026-01-05T12:24:26.891Z" },
+ { url = "https://files.pythonhosted.org/packages/02/a1/874aa756aee5118e690340a771fb9ded0d0c2168c0b7cc7d9561c2a750b0/ty-0.0.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:107c76ebb05a13cdb669172956421f7ffd289ad98f36d42a44a465588d434d58", size = 9097773, upload-time = "2026-01-05T12:24:14.442Z" },
+ { url = "https://files.pythonhosted.org/packages/32/62/cb9a460cf03baab77b3361d13106b93b40c98e274d07c55f333ce3c716f6/ty-0.0.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6868ca5c87ca0caa1b3cb84603c767356242b0659b88307eda69b2fb0bfa416b", size = 9581824, upload-time = "2026-01-05T12:24:35.074Z" },
+ { url = "https://files.pythonhosted.org/packages/5a/97/633ecb348c75c954f09f8913669de8c440b13b43ea7d214503f3f1c4bb60/ty-0.0.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d14a4aa0eb5c1d3591c2adbdda4e44429a6bb5d2e298a704398bb2a7ccdafdfe", size = 9591050, upload-time = "2026-01-05T12:24:08.804Z" },
+ { url = "https://files.pythonhosted.org/packages/6f/e6/4b0c6a7a8a234e2113f88c80cc7aaa9af5868de7a693859f3c49da981934/ty-0.0.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01bd4466504cefa36b465c6608e9af4504415fa67f6affc01c7d6ce36663c7f4", size = 10018262, upload-time = "2026-01-05T12:24:53.791Z" },
+ { url = "https://files.pythonhosted.org/packages/cb/97/076d72a028f6b31e0b87287aa27c5b71a2f9927ee525260ea9f2f56828b8/ty-0.0.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:76c8253d1b30bc2c3eaa1b1411a1c34423decde0f4de0277aa6a5ceacfea93d9", size = 10911642, upload-time = "2026-01-05T12:24:48.264Z" },
+ { url = "https://files.pythonhosted.org/packages/3f/5a/705d6a5ed07ea36b1f23592c3f0dbc8fc7649267bfbb3bf06464cdc9a98a/ty-0.0.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8992fa4a9c6a5434eae4159fdd4842ec8726259bfd860e143ab95d078de6f8e3", size = 10632468, upload-time = "2026-01-05T12:24:24.118Z" },
+ { url = "https://files.pythonhosted.org/packages/44/78/4339a254537488d62bf392a936b3ec047702c0cc33d6ce3a5d613f275cd0/ty-0.0.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c79d503d151acb4a145a3d98702d07cb641c47292f63e5ffa0151e4020a5d33", size = 10273422, upload-time = "2026-01-05T12:24:45.8Z" },
+ { url = "https://files.pythonhosted.org/packages/90/40/e7f386e87c9abd3670dcee8311674d7e551baa23b2e4754e2405976e6c92/ty-0.0.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7a7ebf89ed276b564baa1f0dd9cd708e7b5aa89f19ce1b2f7d7132075abf93e", size = 10120289, upload-time = "2026-01-05T12:24:17.424Z" },
+ { url = "https://files.pythonhosted.org/packages/f7/46/1027442596e725c50d0d1ab5179e9fa78a398ab412994b3006d0ee0899c7/ty-0.0.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ae3866e50109d2400a886bb11d9ef607f23afc020b226af773615cf82ae61141", size = 9566657, upload-time = "2026-01-05T12:24:51.048Z" },
+ { url = "https://files.pythonhosted.org/packages/56/be/df921cf1967226aa01690152002b370a7135c6cced81e86c12b86552cdc4/ty-0.0.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:185244a5eacfcd8f5e2d85b95e4276316772f1e586520a6cb24aa072ec1bac26", size = 9610334, upload-time = "2026-01-05T12:24:20.334Z" },
+ { url = "https://files.pythonhosted.org/packages/ac/e8/f085268860232cc92ebe95415e5c8640f7f1797ac3a49ddd137c6222924d/ty-0.0.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:f834ff27d940edb24b2e86bbb3fb45ab9e07cf59ca8c5ac615095b2542786408", size = 9726701, upload-time = "2026-01-05T12:24:29.785Z" },
+ { url = "https://files.pythonhosted.org/packages/42/b4/9394210c66041cd221442e38f68a596945103d9446ece505889ffa9b3da9/ty-0.0.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:773f4b3ba046de952d7c1ad3a2c09b24f3ed4bc8342ae3cbff62ebc14aa6d48c", size = 10227082, upload-time = "2026-01-05T12:24:40.132Z" },
+ { url = "https://files.pythonhosted.org/packages/dc/9f/75951eb573b473d35dd9570546fc1319f7ca2d5b5c50a5825ba6ea6cb33a/ty-0.0.9-py3-none-win32.whl", hash = "sha256:1f20f67e373038ff20f36d5449e787c0430a072b92d5933c5b6e6fc79d3de4c8", size = 9176458, upload-time = "2026-01-05T12:24:32.559Z" },
+ { url = "https://files.pythonhosted.org/packages/9b/80/b1cdf71ac874e72678161e25e2326a7d30bc3489cd3699561355a168e54f/ty-0.0.9-py3-none-win_amd64.whl", hash = "sha256:2c415f3bbb730f8de2e6e0b3c42eb3a91f1b5fbbcaaead2e113056c3b361c53c", size = 10040479, upload-time = "2026-01-05T12:24:42.697Z" },
+ { url = "https://files.pythonhosted.org/packages/b5/8f/abc75c4bb774b12698629f02d0d12501b0a7dff9c31dc3bd6b6c6467e90a/ty-0.0.9-py3-none-win_arm64.whl", hash = "sha256:48e339d794542afeed710ea4f846ead865cc38cecc335a9c781804d02eaa2722", size = 9543127, upload-time = "2026-01-05T12:24:11.731Z" },
]
[[package]]
@@ -4349,6 +4315,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/76/42/3efaf858001d2c2913de7f354563e3a3a2f0decae3efe98427125a8f441e/typer-0.16.0-py3-none-any.whl", hash = "sha256:1f79bed11d4d02d4310e3c1b7ba594183bcedb0ac73b27a9e5f28f6fb5b98855", size = 46317, upload-time = "2025-05-26T14:30:30.523Z" },
]
+[[package]]
+name = "types-requests"
+version = "2.32.4.20250913"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "urllib3" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/36/27/489922f4505975b11de2b5ad07b4fe1dca0bca9be81a703f26c5f3acfce5/types_requests-2.32.4.20250913.tar.gz", hash = "sha256:abd6d4f9ce3a9383f269775a9835a4c24e5cd6b9f647d64f88aa4613c33def5d", size = 23113, upload-time = "2025-09-13T02:40:02.309Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/2a/20/9a227ea57c1285986c4cf78400d0a91615d25b24e257fd9e2969606bdfae/types_requests-2.32.4.20250913-py3-none-any.whl", hash = "sha256:78c9c1fffebbe0fa487a418e0fa5252017e9c60d1a2da394077f1780f655d7e1", size = 20658, upload-time = "2025-09-13T02:40:01.115Z" },
+]
+
[[package]]
name = "typing-extensions"
version = "4.14.1"
@@ -4360,14 +4338,14 @@ wheels = [
[[package]]
name = "typing-inspection"
-version = "0.4.1"
+version = "0.4.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "typing-extensions" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" }
wheels = [
- { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" },
+ { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" },
]
[[package]]
@@ -4473,6 +4451,20 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/63/9a/0962b05b308494e3202d3f794a6e85abe471fe3cafdbcf95c2e8c713aabd/uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553", size = 4660018, upload-time = "2024-10-14T23:38:10.888Z" },
]
+[[package]]
+name = "virtualenv"
+version = "20.35.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "distlib" },
+ { name = "filelock" },
+ { name = "platformdirs" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/20/28/e6f1a6f655d620846bd9df527390ecc26b3805a0c5989048c210e22c5ca9/virtualenv-20.35.4.tar.gz", hash = "sha256:643d3914d73d3eeb0c552cbb12d7e82adf0e504dbf86a3182f8771a153a1971c", size = 6028799, upload-time = "2025-10-29T06:57:40.511Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/79/0c/c05523fa3181fdf0c9c52a6ba91a23fbf3246cc095f26f6516f9c60e6771/virtualenv-20.35.4-py3-none-any.whl", hash = "sha256:c21c9cede36c9753eeade68ba7d523529f228a403463376cf821eaae2b650f1b", size = 6005095, upload-time = "2025-10-29T06:57:37.598Z" },
+]
+
[[package]]
name = "watchdog"
version = "6.0.0"
@@ -4548,15 +4540,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/32/fa/a4f5c2046385492b2273213ef815bf71a0d4c1943b784fb904e184e30201/watchfiles-1.1.0-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:af06c863f152005c7592df1d6a7009c836a247c9d8adb78fef8575a5a98699db", size = 623315, upload-time = "2025-06-15T19:06:29.076Z" },
]
-[[package]]
-name = "wcwidth"
-version = "0.2.14"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/24/30/6b0809f4510673dc723187aeaf24c7f5459922d01e2f794277a3dfb90345/wcwidth-0.2.14.tar.gz", hash = "sha256:4d478375d31bc5395a3c55c40ccdf3354688364cd61c4f6adacaa9215d0b3605", size = 102293, upload-time = "2025-09-22T16:29:53.023Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/af/b5/123f13c975e9f27ab9c0770f514345bd406d0e8d3b7a0723af9d43f710af/wcwidth-0.2.14-py2.py3-none-any.whl", hash = "sha256:a7bb560c8aee30f9957e5f9895805edd20602f2d7f720186dfd906e82b4982e1", size = 37286, upload-time = "2025-09-22T16:29:51.641Z" },
-]
-
[[package]]
name = "websocket-client"
version = "1.8.0"