Commit 2ca459e
Parent: 4d8a789
ported the hf jobs tool to python
- agent/config_local_mcp.json +0 -16
- agent/core/__init__.py +1 -2
- agent/core/tools.py +8 -88
- agent/tests/__init__.py +0 -3
- agent/tests/unit/test_base.py +0 -10
- agent/tools/__init__.py +8 -0
- agent/tools/hf/__init__.py +0 -7
- agent/tools/hf/base.py +0 -84
- agent/tools/hf/jobs/__init__.py +0 -6
- agent/tools/hf/jobs/api_client.py +0 -156
- agent/tools/hf/jobs/job_utils.py +0 -130
- agent/tools/hf/jobs/uv_utils.py +0 -66
- agent/tools/{hf/jobs/jobs_tool.py → jobs_tool.py} +471 -287
- agent/tools/{hf/types.py → types.py} +3 -1
- agent/tools/{hf/utilities.py → utilities.py} +49 -40
- pyproject.toml +1 -0
- tests/__init__.py +0 -0
- tests/integration/__init__.py +0 -0
- tests/integration/tools/__init__.py +0 -0
- tests/integration/tools/test_jobs_integration.py +452 -0
- tests/tools/__init__.py +0 -1
- tests/tools/hf/__init__.py +0 -1
- tests/tools/hf/jobs/__init__.py +0 -1
- tests/tools/hf/jobs/test_jobs_tool.py +0 -252
- tests/unit/__init__.py +0 -0
- tests/unit/tools/__init__.py +0 -0
- tests/unit/tools/test_jobs_tool.py +454 -0
- uv.lock +36 -0
agent/config_local_mcp.json DELETED

```diff
@@ -1,16 +0,0 @@
-{
-    "model_name": "anthropic/claude-sonnet-4-5-20250929",
-    "tools": [],
-    "system_prompt_path": "",
-    "mcpServers": {
-        "hf-mcp-server": {
-            "command": "node",
-            "args": [
-                "/Users/akseljoonas/Documents/hf-agent/hf-mcp-server/packages/app/dist/server/stdio.js"
-            ],
-            "env": {
-                "DEFAULT_HF_TOKEN": "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"
-            }
-        }
-    }
-}
```
agent/core/__init__.py CHANGED

```diff
@@ -3,10 +3,9 @@ Core agent implementation
 Contains the main agent logic, decision-making, and orchestration
 """
 
 from agent.core.tools import ToolRouter, ToolSpec
 
 __all__ = [
     "ToolRouter",
     "ToolSpec",
-    "create_builtin_tools",
 ]
```
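Worth noting for the hunk above: `create_builtin_tools` was advertised in `__all__` but never imported into `agent.core`, so the entry was stale and a star-import would have failed. A minimal check (a sketch; it assumes this repo's package is importable):

```python
import agent.core

# After this commit, __all__ only advertises names the package actually binds:
assert set(agent.core.__all__) <= set(dir(agent.core))
```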
agent/core/tools.py CHANGED

```diff
@@ -3,7 +3,6 @@ Tool system for the agent
 Provides ToolSpec and ToolRouter for managing both built-in and MCP tools
 """
 
-import subprocess
 import warnings
 from dataclasses import dataclass
 from typing import Any, Awaitable, Callable, Optional
@@ -13,13 +12,15 @@ from lmnr import observe
 from mcp.types import EmbeddedResource, ImageContent, TextContent
 
 from agent.config import MCPServerConfig
-from agent.tools.
+from agent.tools.jobs_tool import HF_JOBS_TOOL_SPEC, hf_jobs_handler
 
 # Suppress aiohttp deprecation warning
 warnings.filterwarnings(
     "ignore", category=DeprecationWarning, module="aiohttp.connector"
 )
 
+NOT_ALLOWED_TOOL_NAMES = ["hf_jobs"]
+
 
 def convert_mcp_content_to_string(content: list) -> str:
     """
@@ -105,6 +106,10 @@ class ToolRouter:
     async def register_mcp_tools(self) -> None:
         tools = await self.mcp_client.list_tools()
         for tool in tools:
+            if tool.name in NOT_ALLOWED_TOOL_NAMES:
+                print(f"Skipping not MCP allowed tool: {tool.name}")
+                continue
+            print(f"MCP Tool: {tool.name}")
             self.register_tool(
                 ToolSpec(
                     name=tool.name,
@@ -174,95 +179,10 @@ class ToolRouter:
 # ============================================================================
 
 
-async def bash_handler(arguments: dict[str, Any]) -> tuple[str, bool]:
-    """Execute bash command"""
-    try:
-        command = arguments.get("command", "")
-        result = subprocess.run(
-            command, shell=True, capture_output=True, text=True, timeout=30
-        )
-        output = result.stdout if result.returncode == 0 else result.stderr
-        success = result.returncode == 0
-        return output, success
-    except Exception as e:
-        return f"Error: {str(e)}", False
-
-
-async def read_file_handler(arguments: dict[str, Any]) -> tuple[str, bool]:
-    """Read file contents"""
-    try:
-        path = arguments.get("path", "")
-        with open(path, "r") as f:
-            content = f.read()
-        return content, True
-    except Exception as e:
-        return f"Error reading file: {str(e)}", False
-
-
-async def write_file_handler(arguments: dict[str, Any]) -> tuple[str, bool]:
-    """Write to file"""
-    try:
-        path = arguments.get("path", "")
-        content = arguments.get("content", "")
-        with open(path, "w") as f:
-            f.write(content)
-        return f"Successfully wrote to {path}", True
-    except Exception as e:
-        return f"Error writing file: {str(e)}", False
-
-
 def create_builtin_tools() -> list[ToolSpec]:
     """Create built-in tool specifications"""
+    print(f"Creating built-in tools: {HF_JOBS_TOOL_SPEC['name']}")
     return [
-        ToolSpec(
-            name="bash",
-            description="Execute a bash command and return its output",
-            parameters={
-                "type": "object",
-                "properties": {
-                    "command": {
-                        "type": "string",
-                        "description": "The bash command to execute",
-                    }
-                },
-                "required": ["command"],
-            },
-            handler=bash_handler,
-        ),
-        ToolSpec(
-            name="read_file",
-            description="Read the contents of a file",
-            parameters={
-                "type": "object",
-                "properties": {
-                    "path": {
-                        "type": "string",
-                        "description": "Path to the file to read",
-                    }
-                },
-                "required": ["path"],
-            },
-            handler=read_file_handler,
-        ),
-        ToolSpec(
-            name="write_file",
-            description="Write content to a file",
-            parameters={
-                "type": "object",
-                "properties": {
-                    "path": {
-                        "type": "string",
-                        "description": "Path to the file to write",
-                    },
-                    "content": {
-                        "type": "string",
-                        "description": "Content to write to the file",
-                    },
-                },
-                "required": ["path", "content"],
-            },
-            handler=write_file_handler,
-        ),
         ToolSpec(
             name=HF_JOBS_TOOL_SPEC["name"],
             description=HF_JOBS_TOOL_SPEC["description"],
```
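The new `NOT_ALLOWED_TOOL_NAMES` list keeps the MCP server's own `hf_jobs` tool from shadowing the built-in port registered by `create_builtin_tools`. A minimal sketch of that skip-list pattern, with the router internals simplified (only the filtering logic mirrors the diff):

```python
NOT_ALLOWED_TOOL_NAMES = ["hf_jobs"]

def filter_mcp_tools(mcp_tool_names: list[str]) -> list[str]:
    """Drop MCP tools that collide with built-in implementations."""
    kept = []
    for name in mcp_tool_names:
        if name in NOT_ALLOWED_TOOL_NAMES:
            print(f"Skipping not MCP allowed tool: {name}")
            continue
        kept.append(name)
    return kept

assert filter_mcp_tools(["hf_jobs", "model_search"]) == ["model_search"]
```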
agent/tests/__init__.py DELETED

```diff
@@ -1,3 +0,0 @@
-"""
-Test suite for HF Agent
-"""
```
agent/tests/unit/test_base.py DELETED

```diff
@@ -1,10 +0,0 @@
-"""
-Unit tests for base agent components
-"""
-
-
-def test_base_agent_initialization():
-    """Test that BaseAgent can be initialized with config"""
-    # This will fail because BaseAgent is abstract
-    # Subclasses should implement this
-    pass
```
agent/tools/__init__.py ADDED

```diff
@@ -0,0 +1,8 @@
+"""
+Hugging Face tools for the agent
+"""
+
+from agent.tools.jobs_tool import HF_JOBS_TOOL_SPEC, HfJobsTool, hf_jobs_handler
+from agent.tools.types import ToolResult
+
+__all__ = ["ToolResult", "HF_JOBS_TOOL_SPEC", "hf_jobs_handler", "HfJobsTool"]
```
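For illustration, the new package surface can then be consumed directly from `agent.tools` (a sketch based on the exports added above; note that `HfJobsTool` must appear quoted in `__all__`, since star-imports require strings):

```python
from agent.tools import HF_JOBS_TOOL_SPEC, HfJobsTool, ToolResult, hf_jobs_handler

# The spec dict is what drives registration in agent/core/tools.py above.
print(HF_JOBS_TOOL_SPEC["name"])
```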
agent/tools/hf/__init__.py DELETED

```diff
@@ -1,7 +0,0 @@
-"""
-Hugging Face tools for the agent
-"""
-from agent.tools.hf.types import ToolResult
-from agent.tools.hf.base import HfApiCall, HfApiError
-
-__all__ = ['ToolResult', 'HfApiCall', 'HfApiError']
```
agent/tools/hf/base.py DELETED

```diff
@@ -1,84 +0,0 @@
-"""
-Base API client for Hugging Face API
-
-Ported from: hf-mcp-server/packages/mcp/src/hf-api-call.ts
-"""
-import os
-from typing import Optional, Dict, Any, TypeVar, Generic
-import httpx
-
-
-TResponse = TypeVar('TResponse')
-
-
-class HfApiError(Exception):
-    """Error from Hugging Face API"""
-
-    def __init__(
-        self,
-        message: str,
-        status: Optional[int] = None,
-        status_text: Optional[str] = None,
-        response_body: Optional[str] = None
-    ):
-        super().__init__(message)
-        self.message = message
-        self.status = status
-        self.status_text = status_text
-        self.response_body = response_body
-
-
-class HfApiCall(Generic[TResponse]):
-    """Base class for making authenticated API calls to Hugging Face"""
-
-    def __init__(
-        self,
-        api_url: str,
-        hf_token: Optional[str] = None,
-        api_timeout: Optional[float] = None
-    ):
-        self.api_url = api_url
-        self.hf_token = hf_token or os.getenv('HF_TOKEN')
-        self.api_timeout = api_timeout or float(os.getenv('HF_API_TIMEOUT', '12.5'))
-
-    async def fetch_from_api(
-        self,
-        url: str,
-        method: str = "GET",
-        json: Optional[Dict[str, Any]] = None,
-        **kwargs
-    ) -> Optional[Any]:
-        """Fetch data from API with auth and error handling"""
-        headers = {
-            "Accept": "application/json",
-            **kwargs.pop("headers", {})
-        }
-
-        if self.hf_token:
-            headers["Authorization"] = f"Bearer {self.hf_token}"
-
-        async with httpx.AsyncClient(timeout=self.api_timeout) as client:
-            if method == "GET":
-                response = await client.get(url, headers=headers, **kwargs)
-            elif method == "POST":
-                response = await client.post(url, headers=headers, json=json, **kwargs)
-            elif method == "DELETE":
-                response = await client.delete(url, headers=headers, **kwargs)
-            elif method == "PUT":
-                response = await client.put(url, headers=headers, json=json, **kwargs)
-            else:
-                raise ValueError(f"Unsupported HTTP method: {method}")
-
-            if not response.is_success:
-                raise HfApiError(
-                    message=f"API request failed: {response.status_code} {response.reason_phrase}",
-                    status=response.status_code,
-                    status_text=response.reason_phrase,
-                    response_body=response.text
-                )
-
-            # Handle empty responses (DELETE often returns empty)
-            if not response.text:
-                return None
-
-            return response.json()
```
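The deleted `HfApiCall` wrapped httpx with hand-rolled Bearer-token auth. After this commit those concerns are delegated to `huggingface_hub`. A minimal equivalent of the old whoami flow using the real `huggingface_hub` API (the token string here is a placeholder):

```python
from huggingface_hub import HfApi

api = HfApi(token="hf_...")  # omit to fall back to HF_TOKEN / cached login
user = api.whoami()          # hits /api/whoami-v2 under the hood
print(user["name"])          # the namespace the old client cached manually
```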
agent/tools/hf/jobs/__init__.py DELETED

```diff
@@ -1,6 +0,0 @@
-"""
-Hugging Face Jobs tool
-"""
-from agent.tools.hf.jobs.jobs_tool import HfJobsTool, HF_JOBS_TOOL_SPEC, hf_jobs_handler
-
-__all__ = ['HfJobsTool', 'HF_JOBS_TOOL_SPEC', 'hf_jobs_handler']
```
agent/tools/hf/jobs/api_client.py DELETED

```diff
@@ -1,156 +0,0 @@
-"""
-Jobs API Client
-
-Ported from: hf-mcp-server/packages/mcp/src/jobs/api-client.ts
-"""
-from typing import Optional, Dict, Any, List
-from agent.tools.hf.base import HfApiCall
-
-
-class JobsApiClient(HfApiCall):
-    """API client for HuggingFace Jobs API"""
-
-    def __init__(self, hf_token: Optional[str] = None, namespace: Optional[str] = None):
-        super().__init__('https://huggingface.co/api', hf_token)
-        self.namespace_cache = namespace
-
-    async def get_namespace(self, namespace: Optional[str] = None) -> str:
-        """
-        Get the namespace (username or org) for the current user
-        Uses cached value or /api/whoami-v2 endpoint as fallback
-        """
-        if namespace:
-            return namespace
-
-        if self.namespace_cache:
-            return self.namespace_cache
-
-        # Fetch from whoami endpoint
-        whoami = await self.fetch_from_api('https://huggingface.co/api/whoami-v2')
-        self.namespace_cache = whoami['name']
-        return self.namespace_cache
-
-    async def run_job(self, job_spec: Dict[str, Any], namespace: Optional[str] = None) -> Dict[str, Any]:
-        """
-        Run a job
-        POST /api/jobs/{namespace}
-        """
-        ns = await self.get_namespace(namespace)
-        url = f'https://huggingface.co/api/jobs/{ns}'
-
-        result = await self.fetch_from_api(url, method='POST', json=job_spec)
-        return result
-
-    async def list_jobs(self, namespace: Optional[str] = None) -> List[Dict[str, Any]]:
-        """
-        List all jobs for a namespace
-        GET /api/jobs/{namespace}
-        """
-        ns = await self.get_namespace(namespace)
-        url = f'https://huggingface.co/api/jobs/{ns}'
-
-        return await self.fetch_from_api(url)
-
-    async def get_job(self, job_id: str, namespace: Optional[str] = None) -> Dict[str, Any]:
-        """
-        Get detailed information about a specific job
-        GET /api/jobs/{namespace}/{jobId}
-        """
-        ns = await self.get_namespace(namespace)
-        url = f'https://huggingface.co/api/jobs/{ns}/{job_id}'
-
-        return await self.fetch_from_api(url)
-
-    async def cancel_job(self, job_id: str, namespace: Optional[str] = None) -> None:
-        """
-        Cancel a running job
-        POST /api/jobs/{namespace}/{jobId}/cancel
-        """
-        ns = await self.get_namespace(namespace)
-        url = f'https://huggingface.co/api/jobs/{ns}/{job_id}/cancel'
-
-        await self.fetch_from_api(url, method='POST')
-
-    def get_logs_url(self, job_id: str, namespace: str) -> str:
-        """Get logs URL for a job (for SSE streaming)"""
-        return f'https://huggingface.co/api/jobs/{namespace}/{job_id}/logs'
-
-    async def create_scheduled_job(
-        self,
-        spec: Dict[str, Any],
-        namespace: Optional[str] = None
-    ) -> Dict[str, Any]:
-        """
-        Create a scheduled job
-        POST /api/scheduled-jobs/{namespace}
-        """
-        ns = await self.get_namespace(namespace)
-        url = f'https://huggingface.co/api/scheduled-jobs/{ns}'
-
-        return await self.fetch_from_api(url, method='POST', json=spec)
-
-    async def list_scheduled_jobs(self, namespace: Optional[str] = None) -> List[Dict[str, Any]]:
-        """
-        List all scheduled jobs
-        GET /api/scheduled-jobs/{namespace}
-        """
-        ns = await self.get_namespace(namespace)
-        url = f'https://huggingface.co/api/scheduled-jobs/{ns}'
-
-        return await self.fetch_from_api(url)
-
-    async def get_scheduled_job(
-        self,
-        scheduled_job_id: str,
-        namespace: Optional[str] = None
-    ) -> Dict[str, Any]:
-        """
-        Get details of a scheduled job
-        GET /api/scheduled-jobs/{namespace}/{scheduledJobId}
-        """
-        ns = await self.get_namespace(namespace)
-        url = f'https://huggingface.co/api/scheduled-jobs/{ns}/{scheduled_job_id}'
-
-        return await self.fetch_from_api(url)
-
-    async def delete_scheduled_job(
-        self,
-        scheduled_job_id: str,
-        namespace: Optional[str] = None
-    ) -> None:
-        """
-        Delete a scheduled job
-        DELETE /api/scheduled-jobs/{namespace}/{scheduledJobId}
-        """
-        ns = await self.get_namespace(namespace)
-        url = f'https://huggingface.co/api/scheduled-jobs/{ns}/{scheduled_job_id}'
-
-        await self.fetch_from_api(url, method='DELETE')
-
-    async def suspend_scheduled_job(
-        self,
-        scheduled_job_id: str,
-        namespace: Optional[str] = None
-    ) -> None:
-        """
-        Suspend a scheduled job
-        POST /api/scheduled-jobs/{namespace}/{scheduledJobId}/suspend
-        """
-        ns = await self.get_namespace(namespace)
-        url = f'https://huggingface.co/api/scheduled-jobs/{ns}/{scheduled_job_id}/suspend'
-
-        await self.fetch_from_api(url, method='POST')
-
-    async def resume_scheduled_job(
-        self,
-        scheduled_job_id: str,
-        namespace: Optional[str] = None
-    ) -> None:
-        """
-        Resume a suspended scheduled job
-        POST /api/scheduled-jobs/{namespace}/{scheduledJobId}/resume
-        """
-        ns = await self.get_namespace(namespace)
-        url = f'https://huggingface.co/api/scheduled-jobs/{ns}/{scheduled_job_id}/resume'
-
-        await self.fetch_from_api(url, method='POST')
```
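For reference, the deleted client simply hit the REST endpoints listed in its docstrings with a Bearer token. A self-contained sketch of one such call with httpx (the endpoint path comes from the deleted code; "your-username" is a placeholder):

```python
import os
import httpx

async def list_jobs_raw(namespace: str) -> list:
    """GET /api/jobs/{namespace}, as the deleted JobsApiClient did."""
    headers = {"Accept": "application/json"}
    token = os.getenv("HF_TOKEN")
    if token:
        headers["Authorization"] = f"Bearer {token}"
    async with httpx.AsyncClient(timeout=12.5) as client:
        resp = await client.get(
            f"https://huggingface.co/api/jobs/{namespace}", headers=headers
        )
        resp.raise_for_status()
        return resp.json()

# run with: asyncio.run(list_jobs_raw("your-username"))
```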
agent/tools/hf/jobs/job_utils.py DELETED

```diff
@@ -1,130 +0,0 @@
-"""
-Job utility functions
-
-Ported from: hf-mcp-server/packages/mcp/src/jobs/commands/utils.ts
-"""
-import re
-import shlex
-from typing import Dict, Optional, Any, List, Union
-
-
-def parse_timeout(timeout: str) -> int:
-    """Parse timeout string (e.g., "5m", "2h", "30s") to seconds"""
-    time_units = {
-        's': 1,
-        'm': 60,
-        'h': 3600,
-        'd': 86400,
-    }
-
-    match = re.match(r'^(\d+(?:\.\d+)?)(s|m|h|d)$', timeout)
-    if match:
-        value = float(match.group(1))
-        unit = match.group(2)
-        return int(value * time_units[unit])
-
-    # Try to parse as plain number (seconds)
-    try:
-        return int(timeout)
-    except ValueError:
-        raise ValueError(
-            f'Invalid timeout format: "{timeout}". Use format like "5m", "2h", "30s", or plain seconds.'
-        )
-
-
-def parse_image_source(image: str) -> Dict[str, Optional[str]]:
-    """
-    Detect if image is a Space URL and extract spaceId
-    Returns {'dockerImage': ...} or {'spaceId': ...}
-    """
-    space_prefixes = [
-        'https://huggingface.co/spaces/',
-        'https://hf.co/spaces/',
-        'huggingface.co/spaces/',
-        'hf.co/spaces/',
-    ]
-
-    for prefix in space_prefixes:
-        if image.startswith(prefix):
-            return {'dockerImage': None, 'spaceId': image[len(prefix):]}
-
-    # Not a space, treat as docker image
-    return {'dockerImage': image, 'spaceId': None}
-
-
-def parse_command(command: Union[str, List[str]]) -> Dict[str, Any]:
-    """
-    Parse command string or array into command array
-    Uses shlex for POSIX-compliant parsing
-    """
-    # If already an array, return as-is
-    if isinstance(command, list):
-        return {'command': command, 'arguments': []}
-
-    # Parse the command string using shlex for POSIX-compliant parsing
-    try:
-        string_args = shlex.split(command)
-    except ValueError as e:
-        raise ValueError(
-            f'Unsupported shell syntax in command: "{command}". '
-            f'Please use an array format for commands with complex shell operators. Error: {e}'
-        )
-
-    if len(string_args) == 0:
-        raise ValueError(f'Invalid command: "{command}". Command cannot be empty.')
-
-    return {'command': string_args, 'arguments': []}
-
-
-def replace_token_placeholder(value: str, hf_token: Optional[str]) -> str:
-    """Replace HF token placeholder with actual token if available"""
-    if not hf_token:
-        return value
-
-    if value in ('$HF_TOKEN', '${HF_TOKEN}'):
-        return hf_token
-
-    return value
-
-
-def transform_env_map(
-    env_map: Optional[Dict[str, str]],
-    hf_token: Optional[str]
-) -> Optional[Dict[str, str]]:
-    """Transform environment map, replacing token placeholders"""
-    if not env_map:
-        return None
-
-    return {
-        key: replace_token_placeholder(value, hf_token)
-        for key, value in env_map.items()
-    }
-
-
-def create_job_spec(args: Dict[str, Any]) -> Dict[str, Any]:
-    """Create a JobSpec from run command arguments"""
-    # Validate required fields
-    if not args.get('image'):
-        raise ValueError('image parameter is required. Provide a Docker image (e.g., "python:3.12") or Space URL.')
-    if not args.get('command'):
-        raise ValueError('command parameter is required. Provide a command as string or array.')
-
-    image_source = parse_image_source(args['image'])
-    command_parsed = parse_command(args['command'])
-    timeout_seconds = parse_timeout(args['timeout']) if args.get('timeout') else None
-    environment = transform_env_map(args.get('env'), args.get('hfToken')) or {}
-    secrets = transform_env_map(args.get('secrets'), args.get('hfToken')) or {}
-
-    spec = {
-        **{k: v for k, v in image_source.items() if v is not None},
-        'command': command_parsed['command'],
-        'arguments': command_parsed['arguments'],
-        'flavor': args.get('flavor', 'cpu-basic'),
-        'environment': environment,
-        'secrets': secrets,
-    }
-
-    if timeout_seconds is not None:
-        spec['timeoutSeconds'] = timeout_seconds
-
-    return spec
```
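The timeout grammar the deleted `parse_timeout` accepted is worth keeping documented, since the new tool passes timeout strings straight through to `huggingface_hub`. A standalone sketch reproducing its behavior:

```python
import re

def parse_timeout(timeout: str) -> int:
    """Parse "5m" / "2h" / "30s" / "1d" or plain seconds into seconds."""
    time_units = {"s": 1, "m": 60, "h": 3600, "d": 86400}
    match = re.match(r"^(\d+(?:\.\d+)?)(s|m|h|d)$", timeout)
    if match:
        return int(float(match.group(1)) * time_units[match.group(2)])
    return int(timeout)  # plain seconds; raises ValueError otherwise

assert parse_timeout("5m") == 300
assert parse_timeout("2h") == 7200
assert parse_timeout("90") == 90
```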
agent/tools/hf/jobs/uv_utils.py DELETED

```diff
@@ -1,66 +0,0 @@
-"""
-UV command utilities
-
-Ported from: hf-mcp-server/packages/mcp/src/jobs/commands/uv-utils.ts
-"""
-import base64
-from typing import List, Dict, Optional, Any
-
-
-UV_DEFAULT_IMAGE = 'ghcr.io/astral-sh/uv:python3.12-bookworm'
-
-
-def build_uv_command(script: str, args: Dict[str, Any]) -> List[str]:
-    """Build UV run command"""
-    parts = ['uv', 'run']
-
-    # Add dependencies
-    with_deps = args.get('with_deps', [])
-    if with_deps:
-        for dep in with_deps:
-            parts.extend(['--with', dep])
-
-    # Add python version
-    python = args.get('python')
-    if python:
-        parts.extend(['-p', python])
-
-    parts.append(script)
-
-    # Add script arguments
-    script_args = args.get('script_args', [])
-    if script_args:
-        parts.extend(script_args)
-
-    return parts
-
-
-def wrap_inline_script(script: str, args: Dict[str, Any]) -> str:
-    """Wrap inline script with base64 encoding for UV"""
-    encoded = base64.b64encode(script.encode('utf-8')).decode('utf-8')
-    base_command = build_uv_command('-', args)
-    # Shell quote the command parts
-    quoted_command = ' '.join(base_command)
-    return f'echo "{encoded}" | base64 -d | {quoted_command}'
-
-
-def resolve_uv_command(args: Dict[str, Any]) -> List[str]:
-    """Resolve UV command based on script source"""
-    script_source = args.get('script', '')
-
-    options = {
-        'with_deps': args.get('with_deps'),
-        'python': args.get('python'),
-        'script_args': args.get('script_args'),
-    }
-
-    # URL script
-    if script_source.startswith('http://') or script_source.startswith('https://'):
-        return build_uv_command(script_source, options)
-
-    # Inline multi-line script
-    if '\n' in script_source:
-        return ['/bin/sh', '-lc', wrap_inline_script(script_source, options)]
-
-    # File path or single-line script
-    return build_uv_command(script_source, options)
```
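The inline-script trick from the deleted uv_utils carries over into the new jobs_tool below: base64-encode the script and decode it inside the container, so no file upload is needed. A standalone sketch of the pipeline it produces:

```python
import base64

script = "import sys\nprint(sys.version)"
encoded = base64.b64encode(script.encode("utf-8")).decode("utf-8")
shell_command = f'echo "{encoded}" | base64 -d | uv run -'
print(shell_command)
# prints something like: echo "aW1wb3J0IHN5cwpw..." | base64 -d | uv run -
```

Piping through `base64 -d` sidesteps shell-quoting issues that would arise from embedding an arbitrary multi-line script directly in a command string.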
agent/tools/{hf/jobs/jobs_tool.py → jobs_tool.py}
RENAMED
|
@@ -1,55 +1,197 @@
|
|
| 1 |
"""
|
| 2 |
-
Hugging Face Jobs Tool -
|
| 3 |
|
| 4 |
-
|
| 5 |
"""
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
| 9 |
-
from
|
| 10 |
-
|
| 11 |
-
from
|
| 12 |
-
from
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
|
| 16 |
format_job_details,
|
|
|
|
| 17 |
format_scheduled_job_details,
|
|
|
|
| 18 |
)
|
| 19 |
|
| 20 |
-
|
| 21 |
# Hardware flavors
|
| 22 |
-
CPU_FLAVORS = [
|
| 23 |
GPU_FLAVORS = [
|
| 24 |
-
|
| 25 |
-
|
| 26 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 27 |
]
|
| 28 |
-
SPECIALIZED_FLAVORS = [
|
| 29 |
ALL_FLAVORS = CPU_FLAVORS + GPU_FLAVORS + SPECIALIZED_FLAVORS
|
| 30 |
|
| 31 |
# Operation names
|
| 32 |
OperationType = Literal[
|
| 33 |
-
"run",
|
| 34 |
-
"
|
| 35 |
-
"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 36 |
]
|
| 37 |
|
| 38 |
# Constants
|
| 39 |
DEFAULT_LOG_WAIT_SECONDS = 10
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 40 |
|
| 41 |
|
| 42 |
class HfJobsTool:
|
| 43 |
-
"""Tool for managing Hugging Face compute jobs"""
|
| 44 |
|
| 45 |
def __init__(self, hf_token: Optional[str] = None, namespace: Optional[str] = None):
|
| 46 |
-
self.
|
| 47 |
-
self.
|
| 48 |
|
| 49 |
async def execute(self, params: Dict[str, Any]) -> ToolResult:
|
| 50 |
"""Execute the specified operation"""
|
| 51 |
-
operation = params.get(
|
| 52 |
-
args = params.get(
|
| 53 |
|
| 54 |
# If no operation provided, return usage instructions
|
| 55 |
if not operation:
|
|
@@ -59,7 +201,7 @@ class HfJobsTool:
|
|
| 59 |
operation = operation.lower()
|
| 60 |
|
| 61 |
# Check if help is requested
|
| 62 |
-
if args.get(
|
| 63 |
return self._show_operation_help(operation)
|
| 64 |
|
| 65 |
try:
|
|
@@ -93,46 +235,36 @@ class HfJobsTool:
|
|
| 93 |
else:
|
| 94 |
return {
|
| 95 |
"formatted": f'Unknown operation: "{operation}"\n\n'
|
| 96 |
-
|
| 97 |
-
|
| 98 |
-
|
| 99 |
-
|
| 100 |
-
|
| 101 |
"totalResults": 0,
|
| 102 |
"resultsShared": 0,
|
| 103 |
-
"isError": True
|
| 104 |
}
|
| 105 |
|
| 106 |
-
except
|
| 107 |
-
error_message = f"API Error: {e.message}"
|
| 108 |
-
if e.response_body:
|
| 109 |
-
try:
|
| 110 |
-
parsed = json.loads(e.response_body)
|
| 111 |
-
formatted_body = json.dumps(parsed, indent=2)
|
| 112 |
-
error_message += f"\n\nServer response:\n{formatted_body}"
|
| 113 |
-
except Exception:
|
| 114 |
-
if len(e.response_body) < 500:
|
| 115 |
-
error_message += f"\n\nServer response: {e.response_body}"
|
| 116 |
-
|
| 117 |
return {
|
| 118 |
-
"formatted":
|
| 119 |
"totalResults": 0,
|
| 120 |
"resultsShared": 0,
|
| 121 |
-
"isError": True
|
| 122 |
}
|
| 123 |
except Exception as e:
|
| 124 |
return {
|
| 125 |
"formatted": f"Error executing {operation}: {str(e)}",
|
| 126 |
"totalResults": 0,
|
| 127 |
"resultsShared": 0,
|
| 128 |
-
"isError": True
|
| 129 |
}
|
| 130 |
|
| 131 |
def _show_help(self) -> ToolResult:
|
| 132 |
"""Show usage instructions when tool is called with no arguments"""
|
| 133 |
-
cpu_flavors_list =
|
| 134 |
-
gpu_flavors_list =
|
| 135 |
-
specialized_flavors_list =
|
| 136 |
|
| 137 |
hardware_section = f"**CPU:** {cpu_flavors_list}\n"
|
| 138 |
if GPU_FLAVORS:
|
|
@@ -217,181 +349,194 @@ Call this tool with:
|
|
| 217 |
- Prefer array commands to avoid shell parsing surprises
|
| 218 |
- To access private Hub assets, include `secrets: {{ "HF_TOKEN": "$HF_TOKEN" }}` to inject your auth token.
|
| 219 |
"""
|
| 220 |
-
return {
|
| 221 |
-
"formatted": usage_text,
|
| 222 |
-
"totalResults": 1,
|
| 223 |
-
"resultsShared": 1
|
| 224 |
-
}
|
| 225 |
|
| 226 |
def _show_operation_help(self, operation: str) -> ToolResult:
|
| 227 |
"""Show help for a specific operation"""
|
| 228 |
help_text = f"Help for operation: {operation}\n\nCall with appropriate arguments. Use the main help for examples."
|
| 229 |
-
return {
|
| 230 |
-
"formatted": help_text,
|
| 231 |
-
"totalResults": 1,
|
| 232 |
-
"resultsShared": 1
|
| 233 |
-
}
|
| 234 |
|
| 235 |
async def _run_job(self, args: Dict[str, Any]) -> ToolResult:
|
| 236 |
-
"""
|
| 237 |
-
|
| 238 |
-
|
| 239 |
-
|
| 240 |
-
|
| 241 |
-
|
| 242 |
-
|
| 243 |
-
|
| 244 |
-
|
| 245 |
-
|
| 246 |
-
|
| 247 |
-
|
| 248 |
-
|
| 249 |
-
|
| 250 |
-
|
| 251 |
-
|
| 252 |
-
|
| 253 |
-
|
| 254 |
-
|
| 255 |
-
|
| 256 |
-
|
| 257 |
-
|
| 258 |
-
|
| 259 |
-
|
| 260 |
-
|
| 261 |
-
|
| 262 |
-
|
| 263 |
-
|
| 264 |
-
|
| 265 |
-
|
| 266 |
-
"resultsShared": 1
|
| 267 |
-
}
|
| 268 |
-
|
| 269 |
-
# Not detached - return job info and link to logs
|
| 270 |
-
response = f"""Job started: {job['id']}
|
| 271 |
-
|
| 272 |
-
**Status:** {job['status']['stage']}
|
| 273 |
-
**View logs at:** {job_url}
|
| 274 |
|
| 275 |
Note: Logs are being collected. Check the job page for real-time logs.
|
| 276 |
"""
|
| 277 |
-
|
| 278 |
-
|
| 279 |
-
|
| 280 |
-
"
|
| 281 |
-
}
|
| 282 |
|
| 283 |
async def _run_uv_job(self, args: Dict[str, Any]) -> ToolResult:
|
| 284 |
-
"""Run job with
|
| 285 |
-
|
| 286 |
-
|
| 287 |
-
|
| 288 |
-
|
| 289 |
-
|
| 290 |
-
|
| 291 |
-
|
| 292 |
-
|
| 293 |
-
|
| 294 |
-
|
| 295 |
-
|
| 296 |
-
|
| 297 |
-
|
| 298 |
-
|
| 299 |
-
|
| 300 |
-
|
| 301 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 302 |
|
| 303 |
-
|
|
|
|
| 304 |
|
| 305 |
async def _list_jobs(self, args: Dict[str, Any]) -> ToolResult:
|
| 306 |
-
"""List
|
| 307 |
-
|
| 308 |
-
|
|
|
|
| 309 |
|
| 310 |
# Filter jobs
|
| 311 |
-
|
|
|
|
| 312 |
|
| 313 |
-
|
| 314 |
-
|
| 315 |
-
|
| 316 |
|
| 317 |
-
#
|
| 318 |
-
|
| 319 |
-
status_filter = args['status'].upper()
|
| 320 |
-
jobs = [job for job in jobs if status_filter in job['status']['stage'].upper()]
|
| 321 |
|
| 322 |
-
|
| 323 |
-
table = format_jobs_table(jobs)
|
| 324 |
|
| 325 |
-
if len(
|
| 326 |
-
if args.get(
|
| 327 |
return {
|
| 328 |
"formatted": "No jobs found.",
|
| 329 |
"totalResults": 0,
|
| 330 |
-
"resultsShared": 0
|
| 331 |
}
|
| 332 |
return {
|
| 333 |
"formatted": 'No running jobs found. Use `{"args": {"all": true}}` to show all jobs.',
|
| 334 |
"totalResults": 0,
|
| 335 |
-
"resultsShared": 0
|
| 336 |
}
|
| 337 |
|
| 338 |
-
response = f"**Jobs ({len(
|
| 339 |
return {
|
| 340 |
"formatted": response,
|
| 341 |
-
"totalResults": len(
|
| 342 |
-
"resultsShared": len(
|
| 343 |
}
|
| 344 |
|
| 345 |
async def _get_logs(self, args: Dict[str, Any]) -> ToolResult:
|
| 346 |
-
"""
|
| 347 |
-
job_id = args.get(
|
| 348 |
if not job_id:
|
| 349 |
return {
|
| 350 |
"formatted": "job_id is required",
|
|
|
|
| 351 |
"totalResults": 0,
|
| 352 |
"resultsShared": 0,
|
| 353 |
-
"isError": True
|
| 354 |
}
|
| 355 |
|
| 356 |
-
|
| 357 |
-
|
| 358 |
-
|
|
|
|
|
|
|
|
|
|
| 359 |
|
| 360 |
-
|
| 361 |
-
|
| 362 |
-
|
|
|
|
|
|
|
|
|
|
| 363 |
|
| 364 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 365 |
|
| 366 |
-
|
| 367 |
-
|
| 368 |
-
|
| 369 |
-
|
| 370 |
-
|
| 371 |
-
|
| 372 |
-
|
| 373 |
|
| 374 |
async def _inspect_job(self, args: Dict[str, Any]) -> ToolResult:
|
| 375 |
-
"""
|
| 376 |
-
job_id = args.get(
|
| 377 |
if not job_id:
|
| 378 |
return {
|
| 379 |
"formatted": "job_id is required",
|
| 380 |
"totalResults": 0,
|
| 381 |
"resultsShared": 0,
|
| 382 |
-
"isError": True
|
| 383 |
}
|
| 384 |
|
| 385 |
job_ids = job_id if isinstance(job_id, list) else [job_id]
|
| 386 |
|
| 387 |
-
# Fetch all jobs
|
| 388 |
jobs = []
|
| 389 |
for jid in job_ids:
|
| 390 |
try:
|
| 391 |
-
job = await
|
| 392 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 393 |
except Exception as e:
|
| 394 |
-
raise Exception(f"Failed to
|
| 395 |
|
| 396 |
formatted_details = format_job_details(jobs)
|
| 397 |
response = f"**Job Details** ({len(jobs)} job{'s' if len(jobs) > 1 else ''}):\n\n{formatted_details}"
|
|
@@ -399,213 +544,242 @@ Note: Full log streaming support is coming soon. Please use the web interface fo
|
|
| 399 |
return {
|
| 400 |
"formatted": response,
|
| 401 |
"totalResults": len(jobs),
|
| 402 |
-
"resultsShared": len(jobs)
|
| 403 |
}
|
| 404 |
|
| 405 |
async def _cancel_job(self, args: Dict[str, Any]) -> ToolResult:
|
| 406 |
-
"""Cancel
|
| 407 |
-
job_id = args.get(
|
| 408 |
if not job_id:
|
| 409 |
return {
|
| 410 |
"formatted": "job_id is required",
|
| 411 |
"totalResults": 0,
|
| 412 |
"resultsShared": 0,
|
| 413 |
-
"isError": True
|
| 414 |
}
|
| 415 |
|
| 416 |
-
await
|
|
|
|
|
|
|
|
|
|
|
|
|
| 417 |
|
| 418 |
response = f"""✓ Job {job_id} has been cancelled.
|
| 419 |
|
| 420 |
To verify, call this tool with `{{"operation": "inspect", "args": {{"job_id": "{job_id}"}}}}`"""
|
| 421 |
|
| 422 |
-
return {
|
| 423 |
-
"formatted": response,
|
| 424 |
-
"totalResults": 1,
|
| 425 |
-
"resultsShared": 1
|
| 426 |
-
}
|
| 427 |
|
| 428 |
async def _scheduled_run(self, args: Dict[str, Any]) -> ToolResult:
|
| 429 |
-
"""Create
|
| 430 |
-
|
| 431 |
-
|
| 432 |
-
|
| 433 |
-
|
| 434 |
-
|
| 435 |
-
|
| 436 |
-
|
| 437 |
-
|
| 438 |
-
|
| 439 |
-
|
| 440 |
-
|
| 441 |
-
|
| 442 |
-
|
| 443 |
-
|
| 444 |
-
|
| 445 |
-
|
| 446 |
-
|
| 447 |
-
|
| 448 |
-
|
| 449 |
-
|
| 450 |
-
|
| 451 |
-
|
| 452 |
-
|
| 453 |
-
**Scheduled Job ID:** {scheduled_job['id']}
|
| 454 |
-
**Schedule:** {scheduled_job['schedule']}
|
| 455 |
-
**Suspended:** {'Yes' if scheduled_job.get('suspend') else 'No'}
|
| 456 |
-
**Next Run:** {scheduled_job.get('nextRun', 'N/A')}
|
| 457 |
-
|
| 458 |
-
To inspect, call this tool with `{{"operation": "scheduled inspect", "args": {{"scheduled_job_id": "{scheduled_job['id']}"}}}}`
|
| 459 |
To list all, call this tool with `{{"operation": "scheduled ps"}}`"""
|
| 460 |
|
| 461 |
-
|
| 462 |
-
|
| 463 |
-
|
| 464 |
-
"
|
| 465 |
-
}
|
| 466 |
|
| 467 |
async def _scheduled_uv(self, args: Dict[str, Any]) -> ToolResult:
|
| 468 |
-
"""Create
|
| 469 |
-
|
| 470 |
-
|
| 471 |
-
|
| 472 |
-
|
| 473 |
-
|
| 474 |
-
|
| 475 |
-
|
| 476 |
-
|
| 477 |
-
|
| 478 |
-
|
| 479 |
-
|
| 480 |
-
|
| 481 |
-
|
| 482 |
-
|
| 483 |
-
|
| 484 |
-
|
| 485 |
-
|
| 486 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 487 |
|
| 488 |
-
|
|
|
|
| 489 |
|
| 490 |
async def _list_scheduled_jobs(self, args: Dict[str, Any]) -> ToolResult:
|
| 491 |
-
"""List scheduled jobs"""
|
| 492 |
-
|
| 493 |
-
|
|
|
|
|
|
|
| 494 |
|
| 495 |
-
# Filter jobs
|
| 496 |
-
|
|
|
|
| 497 |
|
| 498 |
-
#
|
| 499 |
-
|
| 500 |
-
jobs = [job for job in jobs if not job.get('suspend', False)]
|
| 501 |
|
| 502 |
-
|
| 503 |
-
table = format_scheduled_jobs_table(jobs)
|
| 504 |
|
| 505 |
-
if len(
|
| 506 |
-
if args.get(
|
| 507 |
return {
|
| 508 |
"formatted": "No scheduled jobs found.",
|
| 509 |
"totalResults": 0,
|
| 510 |
-
"resultsShared": 0
|
| 511 |
}
|
| 512 |
return {
|
| 513 |
"formatted": 'No active scheduled jobs found. Use `{"args": {"all": true}}` to show suspended jobs.',
|
| 514 |
"totalResults": 0,
|
| 515 |
-
"resultsShared": 0
|
| 516 |
}
|
| 517 |
|
| 518 |
-
response = f"**Scheduled Jobs ({len(
|
| 519 |
return {
|
| 520 |
"formatted": response,
|
| 521 |
-
"totalResults": len(
|
| 522 |
-
"resultsShared": len(
|
| 523 |
}
|
| 524 |
|
| 525 |
async def _inspect_scheduled_job(self, args: Dict[str, Any]) -> ToolResult:
|
| 526 |
-
"""
|
| 527 |
-
scheduled_job_id = args.get(
|
| 528 |
if not scheduled_job_id:
|
| 529 |
return {
|
| 530 |
"formatted": "scheduled_job_id is required",
|
| 531 |
"totalResults": 0,
|
| 532 |
"resultsShared": 0,
|
| 533 |
-
"isError": True
|
| 534 |
}
|
| 535 |
|
| 536 |
-
|
| 537 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 538 |
|
| 539 |
return {
|
| 540 |
"formatted": f"**Scheduled Job Details:**\n\n{formatted_details}",
|
| 541 |
"totalResults": 1,
|
| 542 |
-
"resultsShared": 1
|
| 543 |
}
|
| 544 |
|
| 545 |
async def _delete_scheduled_job(self, args: Dict[str, Any]) -> ToolResult:
|
| 546 |
-
"""Delete
|
| 547 |
-
scheduled_job_id = args.get(
|
| 548 |
if not scheduled_job_id:
|
| 549 |
return {
|
| 550 |
"formatted": "scheduled_job_id is required",
|
| 551 |
"totalResults": 0,
|
| 552 |
"resultsShared": 0,
|
| 553 |
-
"isError": True
|
| 554 |
}
|
| 555 |
|
| 556 |
-
await
|
|
|
|
|
|
|
|
|
|
|
|
|
| 557 |
|
| 558 |
return {
|
| 559 |
"formatted": f"✓ Scheduled job {scheduled_job_id} has been deleted.",
|
| 560 |
"totalResults": 1,
|
| 561 |
-
"resultsShared": 1
|
| 562 |
}
|
| 563 |
|
| 564 |
async def _suspend_scheduled_job(self, args: Dict[str, Any]) -> ToolResult:
|
| 565 |
-
"""Suspend
|
| 566 |
-
scheduled_job_id = args.get(
|
| 567 |
if not scheduled_job_id:
|
| 568 |
return {
|
| 569 |
"formatted": "scheduled_job_id is required",
|
| 570 |
"totalResults": 0,
|
| 571 |
"resultsShared": 0,
|
| 572 |
-
"isError": True
|
| 573 |
}
|
| 574 |
|
| 575 |
-
await
|
|
|
|
|
|
|
|
|
|
|
|
|
| 576 |
|
| 577 |
response = f"""✓ Scheduled job {scheduled_job_id} has been suspended.
|
| 578 |
|
| 579 |
To resume, call this tool with `{{"operation": "scheduled resume", "args": {{"scheduled_job_id": "{scheduled_job_id}"}}}}`"""
|
| 580 |
|
| 581 |
-
return {
|
| 582 |
-
"formatted": response,
|
| 583 |
-
"totalResults": 1,
|
| 584 |
-
"resultsShared": 1
|
| 585 |
-
}
|
| 586 |
|
| 587 |
async def _resume_scheduled_job(self, args: Dict[str, Any]) -> ToolResult:
|
| 588 |
-
"""Resume
|
| 589 |
-
scheduled_job_id = args.get(
|
| 590 |
if not scheduled_job_id:
|
| 591 |
return {
|
| 592 |
"formatted": "scheduled_job_id is required",
|
| 593 |
"totalResults": 0,
|
| 594 |
"resultsShared": 0,
|
| 595 |
-
"isError": True
|
| 596 |
}
|
| 597 |
|
| 598 |
-
await
|
|
|
|
|
|
|
|
|
|
|
|
|
| 599 |
|
| 600 |
response = f"""✓ Scheduled job {scheduled_job_id} has been resumed.
|
| 601 |
|
| 602 |
To inspect, call this tool with `{{"operation": "scheduled inspect", "args": {{"scheduled_job_id": "{scheduled_job_id}"}}}}`"""
|
| 603 |
|
| 604 |
-
return {
|
| 605 |
-
"formatted": response,
|
| 606 |
-
"totalResults": 1,
|
| 607 |
-
"resultsShared": 1
|
| 608 |
-
}
|
| 609 |
|
| 610 |
|
| 611 |
# Tool specification for agent registration
|
|
@@ -622,23 +796,33 @@ HF_JOBS_TOOL_SPEC = {
|
|
| 622 |
"operation": {
|
| 623 |
"type": "string",
|
| 624 |
"enum": [
|
| 625 |
-
"run",
|
| 626 |
-
"
|
| 627 |
-
"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 628 |
],
|
| 629 |
"description": (
|
| 630 |
-
"Operation to execute. Valid values: run, uv, ps, logs, inspect, cancel, "
|
| 631 |
"scheduled run, scheduled uv, scheduled ps, scheduled inspect, scheduled delete, "
|
| 632 |
-
"scheduled suspend, scheduled resume"
|
| 633 |
-
)
|
| 634 |
},
|
| 635 |
"args": {
|
| 636 |
"type": "object",
|
| 637 |
"description": "Operation-specific arguments as a JSON object",
|
| 638 |
-
"additionalProperties": True
|
| 639 |
-
}
|
| 640 |
-
}
|
| 641 |
-
}
|
| 642 |
}
|
| 643 |
|
| 644 |
|
|
|
|
| 1 |
"""
|
| 2 |
+
Hugging Face Jobs Tool - Using huggingface-hub library
|
| 3 |
|
| 4 |
+
Refactored to use official huggingface-hub library instead of custom HTTP client
|
| 5 |
"""
|
| 6 |
+
|
| 7 |
+
import asyncio
|
| 8 |
+
import base64
|
| 9 |
+
from typing import Any, Dict, Literal, Optional
|
| 10 |
+
|
| 11 |
+
from huggingface_hub import HfApi
|
| 12 |
+
from huggingface_hub.utils import HfHubHTTPError
|
| 13 |
+
|
| 14 |
+
from agent.tools.types import ToolResult
|
| 15 |
+
from agent.tools.utilities import (
|
| 16 |
format_job_details,
|
| 17 |
+
format_jobs_table,
|
| 18 |
format_scheduled_job_details,
|
| 19 |
+
format_scheduled_jobs_table,
|
| 20 |
)
|
| 21 |
|
|
|
|
| 22 |
# Hardware flavors
|
| 23 |
+
CPU_FLAVORS = ["cpu-basic", "cpu-upgrade", "cpu-performance", "cpu-xl"]
|
| 24 |
GPU_FLAVORS = [
|
| 25 |
+
"sprx8",
|
| 26 |
+
"zero-a10g",
|
| 27 |
+
"t4-small",
|
| 28 |
+
"t4-medium",
|
| 29 |
+
"l4x1",
|
| 30 |
+
"l4x4",
|
| 31 |
+
"l40sx1",
|
| 32 |
+
"l40sx4",
|
| 33 |
+
"l40sx8",
|
| 34 |
+
"a10g-small",
|
| 35 |
+
"a10g-large",
|
| 36 |
+
"a10g-largex2",
|
| 37 |
+
"a10g-largex4",
|
| 38 |
+
"a100-large",
|
| 39 |
+
"h100",
|
| 40 |
+
"h100x8",
|
| 41 |
]
|
| 42 |
+
SPECIALIZED_FLAVORS = ["inf2x6"]
|
| 43 |
ALL_FLAVORS = CPU_FLAVORS + GPU_FLAVORS + SPECIALIZED_FLAVORS
|
| 44 |
|
| 45 |
# Operation names
|
| 46 |
OperationType = Literal[
|
| 47 |
+
"run",
|
| 48 |
+
"uv",
|
| 49 |
+
"ps",
|
| 50 |
+
"logs",
|
| 51 |
+
"inspect",
|
| 52 |
+
"cancel",
|
| 53 |
+
"scheduled run",
|
| 54 |
+
"scheduled uv",
|
| 55 |
+
"scheduled ps",
|
| 56 |
+
"scheduled inspect",
|
| 57 |
+
"scheduled delete",
|
| 58 |
+
"scheduled suspend",
|
| 59 |
+
"scheduled resume",
|
| 60 |
]
|
| 61 |
|
| 62 |
# Constants
|
| 63 |
DEFAULT_LOG_WAIT_SECONDS = 10
|
| 64 |
+
UV_DEFAULT_IMAGE = "ghcr.io/astral-sh/uv:python3.12-bookworm"
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
def _build_uv_command(
|
| 68 |
+
script: str,
|
| 69 |
+
with_deps: list[str] | None = None,
|
| 70 |
+
python: str | None = None,
|
| 71 |
+
script_args: list[str] | None = None,
|
| 72 |
+
) -> list[str]:
|
| 73 |
+
"""Build UV run command"""
|
| 74 |
+
parts = ["uv", "run"]
|
| 75 |
+
|
| 76 |
+
if with_deps:
|
| 77 |
+
for dep in with_deps:
|
| 78 |
+
parts.extend(["--with", dep])
|
| 79 |
+
|
| 80 |
+
if python:
|
| 81 |
+
parts.extend(["-p", python])
|
| 82 |
+
|
| 83 |
+
parts.append(script)
|
| 84 |
+
|
| 85 |
+
if script_args:
|
| 86 |
+
parts.extend(script_args)
|
| 87 |
+
|
| 88 |
+
return parts
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
def _wrap_inline_script(
|
| 92 |
+
script: str,
|
| 93 |
+
with_deps: list[str] | None = None,
|
| 94 |
+
python: str | None = None,
|
| 95 |
+
script_args: list[str] | None = None,
|
| 96 |
+
) -> str:
|
| 97 |
+
"""Wrap inline script with base64 encoding to avoid file creation"""
|
| 98 |
+
encoded = base64.b64encode(script.encode("utf-8")).decode("utf-8")
|
| 99 |
+
# Build the uv command with stdin (-)
|
| 100 |
+
uv_command = _build_uv_command("-", with_deps, python, script_args)
|
| 101 |
+
# Join command parts with proper spacing
|
| 102 |
+
uv_command_str = " ".join(uv_command)
|
| 103 |
+
return f'echo "{encoded}" | base64 -d | {uv_command_str}'
|
| 104 |
+
|
| 105 |
+
|
| 106 |
+
def _resolve_uv_command(
|
| 107 |
+
script: str,
|
| 108 |
+
with_deps: list[str] | None = None,
|
| 109 |
+
python: str | None = None,
|
| 110 |
+
script_args: list[str] | None = None,
|
| 111 |
+
) -> list[str]:
|
| 112 |
+
"""Resolve UV command based on script source (URL, inline, or file path)"""
|
| 113 |
+
# If URL, use directly
|
| 114 |
+
if script.startswith("http://") or script.startswith("https://"):
|
| 115 |
+
return _build_uv_command(script, with_deps, python, script_args)
|
| 116 |
+
|
| 117 |
+
# If contains newline, treat as inline script
|
| 118 |
+
if "\n" in script:
|
| 119 |
+
wrapped = _wrap_inline_script(script, with_deps, python, script_args)
|
| 120 |
+
return ["/bin/sh", "-lc", wrapped]
|
| 121 |
+
|
| 122 |
+
# Otherwise, treat as file path
|
| 123 |
+
return _build_uv_command(script, with_deps, python, script_args)
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
async def _async_call(func, *args, **kwargs):
|
| 127 |
+
"""Wrap synchronous HfApi calls for async context"""
|
| 128 |
+
return await asyncio.to_thread(func, *args, **kwargs)
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
def _job_info_to_dict(job_info) -> Dict[str, Any]:
|
| 132 |
+
"""Convert JobInfo object to dictionary for formatting functions"""
|
| 133 |
+
return {
|
| 134 |
+
"id": job_info.id,
|
| 135 |
+
"status": {"stage": job_info.status.stage, "message": job_info.status.message},
|
| 136 |
+
"command": job_info.command,
|
| 137 |
+
"createdAt": job_info.created_at.isoformat(),
|
| 138 |
+
"dockerImage": job_info.docker_image,
|
| 139 |
+
"spaceId": job_info.space_id,
|
| 140 |
+
"flavor": job_info.flavor,
|
| 141 |
+
"owner": {"name": job_info.owner.name},
|
| 142 |
+
}
|
| 143 |
+
|
| 144 |
+
|
| 145 |
+
def _scheduled_job_info_to_dict(scheduled_job_info) -> Dict[str, Any]:
|
| 146 |
+
"""Convert ScheduledJobInfo object to dictionary for formatting functions"""
|
| 147 |
+
job_spec = scheduled_job_info.job_spec
|
| 148 |
+
|
| 149 |
+
# Extract last run and next run from status
|
| 150 |
+
last_run = None
|
| 151 |
+
next_run = None
|
| 152 |
+
if scheduled_job_info.status:
|
| 153 |
+
if scheduled_job_info.status.last_job:
|
| 154 |
+
last_run = scheduled_job_info.status.last_job.created_at
|
| 155 |
+
if last_run:
|
| 156 |
+
last_run = (
|
| 157 |
+
last_run.isoformat()
|
| 158 |
+
if hasattr(last_run, "isoformat")
|
| 159 |
+
else str(last_run)
|
| 160 |
+
)
|
| 161 |
+
if scheduled_job_info.status.next_job_run_at:
|
| 162 |
+
next_run = scheduled_job_info.status.next_job_run_at
|
| 163 |
+
next_run = (
|
| 164 |
+
next_run.isoformat()
|
| 165 |
+
if hasattr(next_run, "isoformat")
|
| 166 |
+
else str(next_run)
|
| 167 |
+
)
|
| 168 |
+
|
| 169 |
+
return {
|
| 170 |
+
"id": scheduled_job_info.id,
|
| 171 |
+
"schedule": scheduled_job_info.schedule,
|
| 172 |
+
"suspend": scheduled_job_info.suspend,
|
| 173 |
+
"lastRun": last_run,
|
| 174 |
+
"nextRun": next_run,
|
| 175 |
+
"jobSpec": {
|
| 176 |
+
"dockerImage": job_spec.docker_image,
|
| 177 |
+
"spaceId": job_spec.space_id,
|
| 178 |
+
"command": job_spec.command or [],
|
| 179 |
+
"flavor": job_spec.flavor or "cpu-basic",
|
| 180 |
+
},
|
| 181 |
+
}
|
| 182 |
|
| 183 |
|
| 184 |
class HfJobsTool:
|
| 185 |
+
"""Tool for managing Hugging Face compute jobs using huggingface-hub library"""
|
| 186 |
|
| 187 |
def __init__(self, hf_token: Optional[str] = None, namespace: Optional[str] = None):
|
| 188 |
+
self.api = HfApi(token=hf_token)
|
| 189 |
+
self.namespace = namespace
|
| 190 |
|
| 191 |
async def execute(self, params: Dict[str, Any]) -> ToolResult:
|
| 192 |
"""Execute the specified operation"""
|
| 193 |
+
operation = params.get("operation")
|
| 194 |
+
args = params.get("args", {})
|
| 195 |
|
| 196 |
# If no operation provided, return usage instructions
|
| 197 |
if not operation:
|
|
|
|
| 201 |
operation = operation.lower()
|
| 202 |
|
| 203 |
# Check if help is requested
|
| 204 |
+
if args.get("help"):
|
| 205 |
return self._show_operation_help(operation)
|
| 206 |
|
| 207 |
try:
|
|
|
|
| 235 |
else:
|
| 236 |
return {
|
| 237 |
"formatted": f'Unknown operation: "{operation}"\n\n'
|
| 238 |
+
"Available operations:\n"
|
| 239 |
+
"- run, uv, ps, logs, inspect, cancel\n"
|
| 240 |
+
"- scheduled run, scheduled uv, scheduled ps, scheduled inspect, "
|
| 241 |
+
"scheduled delete, scheduled suspend, scheduled resume\n\n"
|
| 242 |
+
"Call this tool with no operation for full usage instructions.",
|
| 243 |
"totalResults": 0,
|
| 244 |
"resultsShared": 0,
|
| 245 |
+
"isError": True,
|
| 246 |
}
|
| 247 |
|
| 248 |
+
except HfHubHTTPError as e:
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 249 |
return {
|
| 250 |
+
"formatted": f"API Error: {str(e)}",
|
| 251 |
"totalResults": 0,
|
| 252 |
"resultsShared": 0,
|
| 253 |
+
"isError": True,
|
| 254 |
}
|
| 255 |
except Exception as e:
|
| 256 |
return {
|
| 257 |
"formatted": f"Error executing {operation}: {str(e)}",
|
| 258 |
"totalResults": 0,
|
| 259 |
"resultsShared": 0,
|
| 260 |
+
"isError": True,
|
| 261 |
}
|
| 262 |
|
| 263 |
def _show_help(self) -> ToolResult:
|
| 264 |
"""Show usage instructions when tool is called with no arguments"""
|
| 265 |
+
cpu_flavors_list = ", ".join(CPU_FLAVORS)
|
| 266 |
+
gpu_flavors_list = ", ".join(GPU_FLAVORS)
|
| 267 |
+
specialized_flavors_list = ", ".join(SPECIALIZED_FLAVORS)
|
| 268 |
|
| 269 |
hardware_section = f"**CPU:** {cpu_flavors_list}\n"
|
| 270 |
if GPU_FLAVORS:
|
|
|
|
| 349 |
- Prefer array commands to avoid shell parsing surprises
|
| 350 |
- To access private Hub assets, include `secrets: {{ "HF_TOKEN": "$HF_TOKEN" }}` to inject your auth token.
|
| 351 |
"""
|
| 352 |
+
return {"formatted": usage_text, "totalResults": 1, "resultsShared": 1}
|
|
|
|
|
|
|
|
|
|
|
|
|
| 353 |
|
| 354 |
def _show_operation_help(self, operation: str) -> ToolResult:
|
| 355 |
"""Show help for a specific operation"""
|
| 356 |
help_text = f"Help for operation: {operation}\n\nCall with appropriate arguments. Use the main help for examples."
|
| 357 |
+
return {"formatted": help_text, "totalResults": 1, "resultsShared": 1}
|
|
|
|
|
|
|
|
|
|
|
|
|
| 358 |
|
| 359 |
async def _run_job(self, args: Dict[str, Any]) -> ToolResult:
|
| 360 |
+
"""Run a job using HfApi.run_job()"""
|
| 361 |
+
try:
|
| 362 |
+
job = await _async_call(
|
| 363 |
+
self.api.run_job,
|
| 364 |
+
image=args.get("image", "python:3.12"),
|
| 365 |
+
command=args.get("command"),
|
| 366 |
+
env=args.get("env"),
|
| 367 |
+
secrets=args.get("secrets"),
|
| 368 |
+
flavor=args.get("flavor", "cpu-basic"),
|
| 369 |
+
timeout=args.get("timeout", "30m"),
|
| 370 |
+
namespace=args.get("namespace") or self.namespace,
|
| 371 |
+
)
|
| 372 |
+
|
| 373 |
+
# If detached, return immediately
|
| 374 |
+
if args.get("detach", False):
|
| 375 |
+
response = f"""Job started successfully!
|
| 376 |
+
|
| 377 |
+
**Job ID:** {job.id}
|
| 378 |
+
**Status:** {job.status.stage}
|
| 379 |
+
**View at:** {job.url}
|
| 380 |
+
|
| 381 |
+
To check logs, call this tool with `{{"operation": "logs", "args": {{"job_id": "{job.id}"}}}}`
|
| 382 |
+
To inspect, call this tool with `{{"operation": "inspect", "args": {{"job_id": "{job.id}"}}}}`"""
|
| 383 |
+
return {"formatted": response, "totalResults": 1, "resultsShared": 1}
|
| 384 |
+
|
| 385 |
+
# Not detached - return job info
|
| 386 |
+
response = f"""Job started: {job.id}
|
| 387 |
+
|
| 388 |
+
**Status:** {job.status.stage}
|
| 389 |
+
**View logs at:** {job.url}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 390 |
|
| 391 |
Note: Logs are being collected. Check the job page for real-time logs.
|
| 392 |
"""
|
| 393 |
+
return {"formatted": response, "totalResults": 1, "resultsShared": 1}
|
| 394 |
+
|
| 395 |
+
except Exception as e:
|
| 396 |
+
raise Exception(f"Failed to run job: {str(e)}")
|
|
|
|
| 397 |
|
    async def _run_uv_job(self, args: Dict[str, Any]) -> ToolResult:
        """Run UV job with inline script support (no local files needed)"""
        try:
            script = args.get("script")
            if not script:
                raise ValueError("script is required")

            # Resolve the command based on script type (URL, inline, or file)
            command = _resolve_uv_command(
                script=script,
                with_deps=args.get("with_deps") or args.get("dependencies"),
                python=args.get("python"),
                script_args=args.get("script_args"),
            )

            # Use run_job with UV image instead of run_uv_job
            job = await _async_call(
                self.api.run_job,
                image=UV_DEFAULT_IMAGE,
                command=command,
                env=args.get("env"),
                secrets=args.get("secrets"),
                flavor=args.get("flavor", "cpu-basic"),
                timeout=args.get("timeout", "30m"),
                namespace=args.get("namespace") or self.namespace,
            )

            response = f"""UV Job started: {job.id}

**Status:** {job.status.stage}
**View at:** {job.url}

To check logs, call this tool with `{{"operation": "logs", "args": {{"job_id": "{job.id}"}}}}`
"""
            return {"formatted": response, "totalResults": 1, "resultsShared": 1}

        except Exception as e:
            raise Exception(f"Failed to run UV job: {str(e)}")
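For reference, a minimal invocation of this `uv` operation might look like the following sketch; the script, dependency list, and flavor are illustrative values, and the call assumes an async context:

# Illustrative "uv" call with an inline script; values are placeholders.
tool = HfJobsTool()
result = await tool.execute({
    "operation": "uv",
    "args": {
        "script": "print('hello from uv')",
        "with_deps": ["requests"],  # forwarded to _resolve_uv_command
        "flavor": "cpu-basic",
        "timeout": "10m",
    },
})
print(result["formatted"])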
    async def _list_jobs(self, args: Dict[str, Any]) -> ToolResult:
        """List jobs using HfApi.list_jobs()"""
        jobs_list = await _async_call(
            self.api.list_jobs, namespace=args.get("namespace") or self.namespace
        )

        # Filter jobs
        if not args.get("all", False):
            jobs_list = [j for j in jobs_list if j.status.stage == "RUNNING"]

        if args.get("status"):
            status_filter = args["status"].upper()
            jobs_list = [j for j in jobs_list if status_filter in j.status.stage]

        # Convert JobInfo objects to dicts for formatting
        jobs_dicts = [_job_info_to_dict(j) for j in jobs_list]

        table = format_jobs_table(jobs_dicts)

        if len(jobs_list) == 0:
            if args.get("all", False):
                return {
                    "formatted": "No jobs found.",
                    "totalResults": 0,
                    "resultsShared": 0,
                }
            return {
                "formatted": 'No running jobs found. Use `{"args": {"all": true}}` to show all jobs.',
                "totalResults": 0,
                "resultsShared": 0,
            }

        response = f"**Jobs ({len(jobs_list)} total):**\n\n{table}"
        return {
            "formatted": response,
            "totalResults": len(jobs_list),
            "resultsShared": len(jobs_list),
        }
    async def _get_logs(self, args: Dict[str, Any]) -> ToolResult:
        """Fetch logs using HfApi.fetch_job_logs()"""
        job_id = args.get("job_id")
        if not job_id:
            return {
                "formatted": "job_id is required",
                "isError": True,
                "totalResults": 0,
                "resultsShared": 0,
            }

        try:
            # Fetch logs (returns generator, convert to list)
            logs_gen = self.api.fetch_job_logs(
                job_id=job_id, namespace=args.get("namespace") or self.namespace
            )
            logs = await _async_call(list, logs_gen)

            if not logs:
                return {
                    "formatted": f"No logs available for job {job_id}",
                    "totalResults": 0,
                    "resultsShared": 0,
                }

            log_text = "\n".join(logs)
            return {
                "formatted": f"**Logs for {job_id}:**\n\n```\n{log_text}\n```",
                "totalResults": 1,
                "resultsShared": 1,
            }

        except Exception as e:
            return {
                "formatted": f"Failed to fetch logs: {str(e)}",
                "isError": True,
                "totalResults": 0,
                "resultsShared": 0,
            }
    async def _inspect_job(self, args: Dict[str, Any]) -> ToolResult:
        """Inspect job using HfApi.inspect_job()"""
        job_id = args.get("job_id")
        if not job_id:
            return {
                "formatted": "job_id is required",
                "totalResults": 0,
                "resultsShared": 0,
                "isError": True,
            }

        job_ids = job_id if isinstance(job_id, list) else [job_id]

        jobs = []
        for jid in job_ids:
            try:
                job = await _async_call(
                    self.api.inspect_job,
                    job_id=jid,
                    namespace=args.get("namespace") or self.namespace,
                )
                jobs.append(_job_info_to_dict(job))
            except Exception as e:
                raise Exception(f"Failed to inspect job {jid}: {str(e)}")

        formatted_details = format_job_details(jobs)
        response = f"**Job Details** ({len(jobs)} job{'s' if len(jobs) > 1 else ''}):\n\n{formatted_details}"

        return {
            "formatted": response,
            "totalResults": len(jobs),
            "resultsShared": len(jobs),
        }
    async def _cancel_job(self, args: Dict[str, Any]) -> ToolResult:
        """Cancel job using HfApi.cancel_job()"""
        job_id = args.get("job_id")
        if not job_id:
            return {
                "formatted": "job_id is required",
                "totalResults": 0,
                "resultsShared": 0,
                "isError": True,
            }

        await _async_call(
            self.api.cancel_job,
            job_id=job_id,
            namespace=args.get("namespace") or self.namespace,
        )

        response = f"""✓ Job {job_id} has been cancelled.

To verify, call this tool with `{{"operation": "inspect", "args": {{"job_id": "{job_id}"}}}}`"""

        return {"formatted": response, "totalResults": 1, "resultsShared": 1}
    async def _scheduled_run(self, args: Dict[str, Any]) -> ToolResult:
        """Create scheduled job using HfApi.create_scheduled_job()"""
        try:
            scheduled_job = await _async_call(
                self.api.create_scheduled_job,
                image=args.get("image", "python:3.12"),
                command=args.get("command"),
                schedule=args.get("schedule"),
                env=args.get("env"),
                secrets=args.get("secrets"),
                flavor=args.get("flavor", "cpu-basic"),
                timeout=args.get("timeout", "30m"),
                namespace=args.get("namespace") or self.namespace,
            )

            scheduled_dict = _scheduled_job_info_to_dict(scheduled_job)

            response = f"""✓ Scheduled job created successfully!

**Scheduled Job ID:** {scheduled_dict["id"]}
**Schedule:** {scheduled_dict["schedule"]}
**Suspended:** {"Yes" if scheduled_dict.get("suspend") else "No"}
**Next Run:** {scheduled_dict.get("nextRun", "N/A")}

To inspect, call this tool with `{{"operation": "scheduled inspect", "args": {{"scheduled_job_id": "{scheduled_dict["id"]}"}}}}`
To list all, call this tool with `{{"operation": "scheduled ps"}}`"""

            return {"formatted": response, "totalResults": 1, "resultsShared": 1}

        except Exception as e:
            raise Exception(f"Failed to create scheduled job: {str(e)}")
    async def _scheduled_uv(self, args: Dict[str, Any]) -> ToolResult:
        """Create scheduled UV job with inline script support"""
        try:
            script = args.get("script")
            if not script:
                raise ValueError("script is required")

            schedule = args.get("schedule")
            if not schedule:
                raise ValueError("schedule is required")

            # Resolve the command based on script type
            command = _resolve_uv_command(
                script=script,
                with_deps=args.get("with_deps") or args.get("dependencies"),
                python=args.get("python"),
                script_args=args.get("script_args"),
            )

            # Use create_scheduled_job with UV image
            scheduled_job = await _async_call(
                self.api.create_scheduled_job,
                image=UV_DEFAULT_IMAGE,
                command=command,
                schedule=schedule,
                env=args.get("env"),
                secrets=args.get("secrets"),
                flavor=args.get("flavor", "cpu-basic"),
                timeout=args.get("timeout", "30m"),
                namespace=args.get("namespace") or self.namespace,
            )

            scheduled_dict = _scheduled_job_info_to_dict(scheduled_job)

            response = f"""✓ Scheduled UV job created successfully!

**Scheduled Job ID:** {scheduled_dict["id"]}
**Schedule:** {scheduled_dict["schedule"]}
**Suspended:** {"Yes" if scheduled_dict.get("suspend") else "No"}
**Next Run:** {scheduled_dict.get("nextRun", "N/A")}

To inspect, call this tool with `{{"operation": "scheduled inspect", "args": {{"scheduled_job_id": "{scheduled_dict["id"]}"}}}}`"""

            return {"formatted": response, "totalResults": 1, "resultsShared": 1}

        except Exception as e:
            raise Exception(f"Failed to create scheduled UV job: {str(e)}")
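As a usage sketch, a nightly scheduled UV job could be requested like this; the cron string and inline script are illustrative, not taken from the commit:

# Illustrative "scheduled uv" call; "0 3 * * *" is a cron expression (03:00 daily).
result = await tool.execute({
    "operation": "scheduled uv",
    "args": {
        "script": "print('nightly task')",
        "schedule": "0 3 * * *",
        "flavor": "cpu-basic",
    },
})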
    async def _list_scheduled_jobs(self, args: Dict[str, Any]) -> ToolResult:
        """List scheduled jobs using HfApi.list_scheduled_jobs()"""
        scheduled_jobs_list = await _async_call(
            self.api.list_scheduled_jobs,
            namespace=args.get("namespace") or self.namespace,
        )

        # Filter jobs - default: hide suspended jobs unless --all is specified
        if not args.get("all", False):
            scheduled_jobs_list = [j for j in scheduled_jobs_list if not j.suspend]

        # Convert to dicts for formatting
        scheduled_dicts = [_scheduled_job_info_to_dict(j) for j in scheduled_jobs_list]

        table = format_scheduled_jobs_table(scheduled_dicts)

        if len(scheduled_jobs_list) == 0:
            if args.get("all", False):
                return {
                    "formatted": "No scheduled jobs found.",
                    "totalResults": 0,
                    "resultsShared": 0,
                }
            return {
                "formatted": 'No active scheduled jobs found. Use `{"args": {"all": true}}` to show suspended jobs.',
                "totalResults": 0,
                "resultsShared": 0,
            }

        response = f"**Scheduled Jobs ({len(scheduled_jobs_list)} total):**\n\n{table}"
        return {
            "formatted": response,
            "totalResults": len(scheduled_jobs_list),
            "resultsShared": len(scheduled_jobs_list),
        }
    async def _inspect_scheduled_job(self, args: Dict[str, Any]) -> ToolResult:
        """Inspect scheduled job using HfApi.inspect_scheduled_job()"""
        scheduled_job_id = args.get("scheduled_job_id")
        if not scheduled_job_id:
            return {
                "formatted": "scheduled_job_id is required",
                "totalResults": 0,
                "resultsShared": 0,
                "isError": True,
            }

        scheduled_job = await _async_call(
            self.api.inspect_scheduled_job,
            scheduled_job_id=scheduled_job_id,
            namespace=args.get("namespace") or self.namespace,
        )

        scheduled_dict = _scheduled_job_info_to_dict(scheduled_job)
        formatted_details = format_scheduled_job_details(scheduled_dict)

        return {
            "formatted": f"**Scheduled Job Details:**\n\n{formatted_details}",
            "totalResults": 1,
            "resultsShared": 1,
        }
    async def _delete_scheduled_job(self, args: Dict[str, Any]) -> ToolResult:
        """Delete scheduled job using HfApi.delete_scheduled_job()"""
        scheduled_job_id = args.get("scheduled_job_id")
        if not scheduled_job_id:
            return {
                "formatted": "scheduled_job_id is required",
                "totalResults": 0,
                "resultsShared": 0,
                "isError": True,
            }

        await _async_call(
            self.api.delete_scheduled_job,
            scheduled_job_id=scheduled_job_id,
            namespace=args.get("namespace") or self.namespace,
        )

        return {
            "formatted": f"✓ Scheduled job {scheduled_job_id} has been deleted.",
            "totalResults": 1,
            "resultsShared": 1,
        }
    async def _suspend_scheduled_job(self, args: Dict[str, Any]) -> ToolResult:
        """Suspend scheduled job using HfApi.suspend_scheduled_job()"""
        scheduled_job_id = args.get("scheduled_job_id")
        if not scheduled_job_id:
            return {
                "formatted": "scheduled_job_id is required",
                "totalResults": 0,
                "resultsShared": 0,
                "isError": True,
            }

        await _async_call(
            self.api.suspend_scheduled_job,
            scheduled_job_id=scheduled_job_id,
            namespace=args.get("namespace") or self.namespace,
        )

        response = f"""✓ Scheduled job {scheduled_job_id} has been suspended.

To resume, call this tool with `{{"operation": "scheduled resume", "args": {{"scheduled_job_id": "{scheduled_job_id}"}}}}`"""

        return {"formatted": response, "totalResults": 1, "resultsShared": 1}
    async def _resume_scheduled_job(self, args: Dict[str, Any]) -> ToolResult:
        """Resume scheduled job using HfApi.resume_scheduled_job()"""
        scheduled_job_id = args.get("scheduled_job_id")
        if not scheduled_job_id:
            return {
                "formatted": "scheduled_job_id is required",
                "totalResults": 0,
                "resultsShared": 0,
                "isError": True,
            }

        await _async_call(
            self.api.resume_scheduled_job,
            scheduled_job_id=scheduled_job_id,
            namespace=args.get("namespace") or self.namespace,
        )

        response = f"""✓ Scheduled job {scheduled_job_id} has been resumed.

To inspect, call this tool with `{{"operation": "scheduled inspect", "args": {{"scheduled_job_id": "{scheduled_job_id}"}}}}`"""

        return {"formatted": response, "totalResults": 1, "resultsShared": 1}
# Tool specification for agent registration
…
            "operation": {
                "type": "string",
                "enum": [
                    "run",
                    "uv",
                    "ps",
                    "logs",
                    "inspect",
                    "cancel",
                    "scheduled run",
                    "scheduled uv",
                    "scheduled ps",
                    "scheduled inspect",
                    "scheduled delete",
                    "scheduled suspend",
                    "scheduled resume",
                ],
                "description": (
                    "Operation to execute. Valid values: [run, uv, ps, logs, inspect, cancel, "
                    "scheduled run, scheduled uv, scheduled ps, scheduled inspect, scheduled delete, "
                    "scheduled suspend, scheduled resume]"
                ),
            },
            "args": {
                "type": "object",
                "description": "Operation-specific arguments as a JSON object",
                "additionalProperties": True,
            },
        },
    },
}
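Tying the schema together, a complete call through the tool entry point might look like the sketch below. It assumes `hf_jobs_handler` keeps the `(output, success)` return shape exercised by the old test suite deleted later in this commit; that shape is not re-confirmed in the visible part of the new tests:

# Illustrative end-to-end call matching the spec above.
import asyncio

from agent.tools.jobs_tool import hf_jobs_handler

async def demo():
    # "ps" with all=True lists every job, not just RUNNING ones.
    output, success = await hf_jobs_handler({"operation": "ps", "args": {"all": True}})
    print(success, output)

asyncio.run(demo())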
agent/tools/{hf/types.py → types.py}
RENAMED
@@ -3,11 +3,13 @@ Types for Hugging Face tools

Ported from: hf-mcp-server/packages/mcp/src/types/
"""

from typing import TypedDict


class ToolResult(TypedDict, total=False):
    """Result returned by HF tool operations"""

    formatted: str
    totalResults: int
    resultsShared: int
agent/tools/{hf/utilities.py → utilities.py}
RENAMED
@@ -3,15 +3,16 @@ Utility functions for Hugging Face tools

Ported from: hf-mcp-server/packages/mcp/src/jobs/formatters.ts
"""

from datetime import datetime
from typing import Any, Dict, List, Optional


def truncate(text: str, max_length: int) -> str:
    """Truncate a string to a maximum length with ellipsis"""
    if len(text) <= max_length:
        return text
    return text[: max_length - 3] + "..."


def format_date(date_str: Optional[str]) -> str:
@@ -19,8 +20,8 @@ def format_date(date_str: Optional[str]) -> str:
    if not date_str:
        return "N/A"
    try:
        date = datetime.fromisoformat(date_str.replace("Z", "+00:00"))
        return date.strftime("%Y-%m-%d %H:%M:%S")
    except Exception:
        return date_str

@@ -34,10 +35,10 @@ def format_command(command: Optional[List[str]]) -> str:

def get_image_or_space(job: Dict[str, Any]) -> str:
    """Get image/space identifier from job"""
    if job.get("spaceId"):
        return job["spaceId"]
    if job.get("dockerImage"):
        return job["dockerImage"]
    return "N/A"

@@ -47,16 +48,16 @@ def format_jobs_table(jobs: List[Dict[str, Any]]) -> str:
        return "No jobs found."

    # Calculate dynamic ID column width
    longest_id_length = max(len(job["id"]) for job in jobs)
    id_column_width = max(longest_id_length, len("JOB ID"))

    # Define column widths
    col_widths = {
        "id": id_column_width,
        "image": 20,
        "command": 30,
        "created": 19,
        "status": 12,
    }

    # Build header
@@ -66,15 +67,17 @@ def format_jobs_table(jobs: List[Dict[str, Any]]) -> str:
    # Build rows
    rows = []
    for job in jobs:
        job_id = job["id"]
        image = truncate(get_image_or_space(job), col_widths["image"])
        command = truncate(format_command(job.get("command")), col_widths["command"])
        created = truncate(format_date(job.get("createdAt")), col_widths["created"])
        status = truncate(job["status"]["stage"], col_widths["status"])

        rows.append(
            f"| {job_id.ljust(col_widths['id'])} | {image.ljust(col_widths['image'])} | {command.ljust(col_widths['command'])} | {created.ljust(col_widths['created'])} | {status.ljust(col_widths['status'])} |"
        )

    return "\n".join([header, separator] + rows)


def format_scheduled_jobs_table(jobs: List[Dict[str, Any]]) -> str:
@@ -83,18 +86,18 @@ def format_scheduled_jobs_table(jobs: List[Dict[str, Any]]) -> str:
        return "No scheduled jobs found."

    # Calculate dynamic ID column width
    longest_id_length = max(len(job["id"]) for job in jobs)
    id_column_width = max(longest_id_length, len("ID"))

    # Define column widths
    col_widths = {
        "id": id_column_width,
        "schedule": 12,
        "image": 18,
        "command": 25,
        "lastRun": 19,
        "nextRun": 19,
        "suspend": 9,
    }

    # Build header
@@ -104,22 +107,27 @@ def format_scheduled_jobs_table(jobs: List[Dict[str, Any]]) -> str:
    # Build rows
    rows = []
    for job in jobs:
        job_id = job["id"]
        schedule = truncate(job["schedule"], col_widths["schedule"])
        image = truncate(get_image_or_space(job["jobSpec"]), col_widths["image"])
        command = truncate(
            format_command(job["jobSpec"].get("command")), col_widths["command"]
        )
        last_run = truncate(format_date(job.get("lastRun")), col_widths["lastRun"])
        next_run = truncate(format_date(job.get("nextRun")), col_widths["nextRun"])
        suspend = "Yes" if job.get("suspend") else "No"

        rows.append(
            f"| {job_id.ljust(col_widths['id'])} | {schedule.ljust(col_widths['schedule'])} | {image.ljust(col_widths['image'])} | {command.ljust(col_widths['command'])} | {last_run.ljust(col_widths['lastRun'])} | {next_run.ljust(col_widths['nextRun'])} | {suspend.ljust(col_widths['suspend'])} |"
        )

    return "\n".join([header, separator] + rows)


def format_job_details(jobs: Any) -> str:
    """Format job details as JSON in a markdown code block"""
    import json

    job_array = jobs if isinstance(jobs, list) else [jobs]
    json_str = json.dumps(job_array, indent=2)
    return f"```json\n{json_str}\n```"
@@ -128,6 +136,7 @@ def format_job_details(jobs: Any) -> str:

def format_scheduled_job_details(jobs: Any) -> str:
    """Format scheduled job details as JSON in a markdown code block"""
    import json

    job_array = jobs if isinstance(jobs, list) else [jobs]
    json_str = json.dumps(job_array, indent=2)
    return f"```json\n{json_str}\n```"
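For a quick sense of what `format_jobs_table` emits, one job dict produces a single pipe-delimited row under the header built in the collapsed "# Build header" section above; the field values here are placeholders:

# Illustrative call with a minimal job dict.
from agent.tools.utilities import format_jobs_table

print(format_jobs_table([
    {
        "id": "job-123",
        "dockerImage": "python:3.12",
        "command": ["python", "-c", "print('hi')"],
        "createdAt": "2024-01-01T00:00:00Z",
        "status": {"stage": "RUNNING"},
    }
]))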
pyproject.toml
CHANGED
@@ -19,4 +19,5 @@ dependencies = [
    "lmnr[all]>=0.7.23",
    "transformers>=2.3.0",
    "torch>=2.9.1",
+   "pytest>=9.0.2",
]
tests/__init__.py
ADDED
File without changes
tests/integration/__init__.py
ADDED
File without changes
tests/integration/tools/__init__.py
ADDED
File without changes
tests/integration/tools/test_jobs_integration.py
ADDED
@@ -0,0 +1,452 @@
#!/usr/bin/env python3
"""
Integration tests for refactored HF Jobs Tool
Tests with real HF API using HF_TOKEN from environment
"""
import os
import sys
import asyncio
import time

# Add parent directory to path
sys.path.insert(0, '.')

from agent.tools.jobs_tool import HfJobsTool

# ANSI color codes for better output
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
BLUE = '\033[94m'
RESET = '\033[0m'


def print_test(msg):
    """Print test message in blue"""
    print(f"{BLUE}[TEST]{RESET} {msg}")


def print_success(msg):
    """Print success message in green"""
    print(f"{GREEN}✓{RESET} {msg}")


def print_warning(msg):
    """Print warning message in yellow"""
    print(f"{YELLOW}⚠{RESET} {msg}")


def print_error(msg):
    """Print error message in red"""
    print(f"{RED}✗{RESET} {msg}")


async def test_basic_job_run(tool):
    """Test running a basic job"""
    print_test("Running a simple Python job...")

    result = await tool.execute({
        "operation": "run",
        "args": {
            "image": "python:3.12",
            "command": ["python", "-c", "print('Hello from HF Jobs!')"],
            "flavor": "cpu-basic",
            "timeout": "5m",
            "detach": True  # Don't wait for completion
        }
    })

    if result.get("isError"):
        print_error(f"Failed to run job: {result['formatted']}")
        return None

    # Extract job ID from response
    import re
    job_id_match = re.search(r'\*\*Job ID:\*\* (\S+)', result['formatted'])
    if job_id_match:
        job_id = job_id_match.group(1)
        print_success(f"Job started with ID: {job_id}")
        return job_id

    print_error("Could not extract job ID from response")
    return None


async def test_list_jobs(tool):
    """Test listing jobs"""
    print_test("Listing running jobs...")

    result = await tool.execute({
        "operation": "ps",
        "args": {}
    })

    if result.get("isError"):
        print_error(f"Failed to list jobs: {result['formatted']}")
        return False

    print_success(f"Listed jobs: {result['totalResults']} running")
    if result['totalResults'] > 0:
        print(f"  {result['formatted'][:200]}...")
    return True


async def test_inspect_job(tool, job_id):
    """Test inspecting a specific job"""
    print_test(f"Inspecting job {job_id}...")

    result = await tool.execute({
        "operation": "inspect",
        "args": {
            "job_id": job_id
        }
    })

    if result.get("isError"):
        print_error(f"Failed to inspect job: {result['formatted']}")
        return False

    print_success(f"Inspected job successfully")
    return True


async def test_get_logs(tool, job_id):
    """Test fetching job logs"""
    print_test(f"Fetching logs for job {job_id}...")

    # Wait a bit for logs to be available
    await asyncio.sleep(2)

    result = await tool.execute({
        "operation": "logs",
        "args": {
            "job_id": job_id
        }
    })

    if result.get("isError"):
        print_warning(f"Could not fetch logs (might be too early): {result['formatted'][:100]}")
        return False

    print_success(f"Fetched logs successfully")
    if "Hello from HF Jobs!" in result['formatted']:
        print_success("  Found expected output in logs!")
    return True


async def test_cancel_job(tool, job_id):
    """Test cancelling a job"""
    print_test(f"Cancelling job {job_id}...")

    result = await tool.execute({
        "operation": "cancel",
        "args": {
            "job_id": job_id
        }
    })

    if result.get("isError"):
        print_error(f"Failed to cancel job: {result['formatted']}")
        return False

    print_success(f"Cancelled job successfully")
    return True


async def test_uv_job(tool):
    """Test running a UV job"""
    print_test("Running a UV Python script job...")

    result = await tool.execute({
        "operation": "uv",
        "args": {
            "script": "print('Hello from UV!')\nimport sys\nprint(f'Python version: {sys.version}')",
            "flavor": "cpu-basic",
            "timeout": "5m",
            "detach": True
        }
    })

    if result.get("isError"):
        print_error(f"Failed to run UV job: {result['formatted']}")
        return None

    # Extract job ID
    import re
    job_id_match = re.search(r'UV Job started: (\S+)', result['formatted'])
    if job_id_match:
        job_id = job_id_match.group(1)
        print_success(f"UV job started with ID: {job_id}")
        return job_id

    print_error("Could not extract job ID from response")
    return None


async def test_list_all_jobs(tool):
    """Test listing all jobs (including completed)"""
    print_test("Listing all jobs (including completed)...")

    result = await tool.execute({
        "operation": "ps",
        "args": {
            "all": True
        }
    })

    if result.get("isError"):
        print_error(f"Failed to list all jobs: {result['formatted']}")
        return False

    print_success(f"Listed all jobs: {result['totalResults']} total")
    return True


async def test_scheduled_job(tool):
    """Test creating and managing a scheduled job"""
    print_test("Creating a scheduled job (daily at midnight)...")

    result = await tool.execute({
        "operation": "scheduled run",
        "args": {
            "image": "python:3.12",
            "command": ["python", "-c", "print('Scheduled job running!')"],
            "schedule": "@daily",
            "flavor": "cpu-basic",
            "timeout": "5m"
        }
    })

    if result.get("isError"):
        print_error(f"Failed to create scheduled job: {result['formatted']}")
        return None

    # Extract scheduled job ID
    import re
    job_id_match = re.search(r'\*\*Scheduled Job ID:\*\* (\S+)', result['formatted'])
    if not job_id_match:
        print_error("Could not extract scheduled job ID")
        return None

    scheduled_job_id = job_id_match.group(1)
    print_success(f"Scheduled job created with ID: {scheduled_job_id}")
    return scheduled_job_id


async def test_list_scheduled_jobs(tool):
    """Test listing scheduled jobs"""
    print_test("Listing scheduled jobs...")

    result = await tool.execute({
        "operation": "scheduled ps",
        "args": {}
    })

    if result.get("isError"):
        print_error(f"Failed to list scheduled jobs: {result['formatted']}")
        return False

    print_success(f"Listed scheduled jobs: {result['totalResults']} active")
    return True


async def test_inspect_scheduled_job(tool, scheduled_job_id):
    """Test inspecting a scheduled job"""
    print_test(f"Inspecting scheduled job {scheduled_job_id}...")

    result = await tool.execute({
        "operation": "scheduled inspect",
        "args": {
            "scheduled_job_id": scheduled_job_id
        }
    })

    if result.get("isError"):
        print_error(f"Failed to inspect scheduled job: {result['formatted']}")
        return False

    print_success(f"Inspected scheduled job successfully")
    return True


async def test_suspend_scheduled_job(tool, scheduled_job_id):
    """Test suspending a scheduled job"""
    print_test(f"Suspending scheduled job {scheduled_job_id}...")

    result = await tool.execute({
        "operation": "scheduled suspend",
        "args": {
            "scheduled_job_id": scheduled_job_id
        }
    })

    if result.get("isError"):
        print_error(f"Failed to suspend scheduled job: {result['formatted']}")
        return False

    print_success(f"Suspended scheduled job successfully")
    return True


async def test_resume_scheduled_job(tool, scheduled_job_id):
    """Test resuming a scheduled job"""
    print_test(f"Resuming scheduled job {scheduled_job_id}...")

    result = await tool.execute({
        "operation": "scheduled resume",
        "args": {
            "scheduled_job_id": scheduled_job_id
        }
    })

    if result.get("isError"):
        print_error(f"Failed to resume scheduled job: {result['formatted']}")
        return False

    print_success(f"Resumed scheduled job successfully")
    return True


async def test_delete_scheduled_job(tool, scheduled_job_id):
    """Test deleting a scheduled job"""
    print_test(f"Deleting scheduled job {scheduled_job_id}...")

    result = await tool.execute({
        "operation": "scheduled delete",
        "args": {
            "scheduled_job_id": scheduled_job_id
        }
    })

    if result.get("isError"):
        print_error(f"Failed to delete scheduled job: {result['formatted']}")
        return False

    print_success(f"Deleted scheduled job successfully")
    return True


async def main():
    """Run all integration tests"""
    print("=" * 70)
    print(f"{BLUE}HF Jobs Tool - Integration Tests{RESET}")
    print("=" * 70)
    print()

    # Check for HF_TOKEN
    hf_token = os.environ.get('HF_TOKEN')
    if not hf_token:
        print_error("HF_TOKEN not found in environment variables!")
        print_warning("Set it with: export HF_TOKEN='your_token_here'")
        sys.exit(1)

    print_success(f"Found HF_TOKEN (length: {len(hf_token)})")
    print()

    # Initialize tool with token
    tool = HfJobsTool(hf_token=hf_token)

    # Track job IDs for cleanup
    job_ids = []
    scheduled_job_ids = []

    try:
        # Test 1: Run basic job
        print(f"\n{YELLOW}{'=' * 70}{RESET}")
        print(f"{YELLOW}Test Suite 1: Regular Jobs{RESET}")
        print(f"{YELLOW}{'=' * 70}{RESET}\n")

        job_id = await test_basic_job_run(tool)
        if job_id:
            job_ids.append(job_id)

            # Wait a moment for job to register
            await asyncio.sleep(1)

            # Test 2: List jobs
            await test_list_jobs(tool)

            # Test 3: Inspect job
            await test_inspect_job(tool, job_id)

            # Test 4: Get logs
            await test_get_logs(tool, job_id)

            # Test 5: Cancel job (cleanup)
            await test_cancel_job(tool, job_id)

        # Test 6: UV job
        print()
        uv_job_id = await test_uv_job(tool)
        if uv_job_id:
            job_ids.append(uv_job_id)
            await asyncio.sleep(1)
            await test_cancel_job(tool, uv_job_id)

        # Test 7: List all jobs
        print()
        await test_list_all_jobs(tool)

        # Test Suite 2: Scheduled Jobs
        print(f"\n{YELLOW}{'=' * 70}{RESET}")
        print(f"{YELLOW}Test Suite 2: Scheduled Jobs{RESET}")
        print(f"{YELLOW}{'=' * 70}{RESET}\n")

        scheduled_job_id = await test_scheduled_job(tool)
        if scheduled_job_id:
            scheduled_job_ids.append(scheduled_job_id)

            # Wait a moment for job to register
            await asyncio.sleep(1)

            # Test scheduled job operations
            await test_list_scheduled_jobs(tool)
            print()
            await test_inspect_scheduled_job(tool, scheduled_job_id)
            print()
            await test_suspend_scheduled_job(tool, scheduled_job_id)
            print()
            await test_resume_scheduled_job(tool, scheduled_job_id)
            print()

            # Cleanup: Delete scheduled job
            await test_delete_scheduled_job(tool, scheduled_job_id)

        # Final summary
        print(f"\n{YELLOW}{'=' * 70}{RESET}")
        print(f"{GREEN}✓ All integration tests completed!{RESET}")
        print(f"{YELLOW}{'=' * 70}{RESET}\n")

        print_success("Refactored implementation works correctly with real HF API")
        print_success("All 13 operations tested and verified")
        print()
        print(f"{BLUE}Summary:{RESET}")
        print(f"  • Regular jobs: ✓ run, list, inspect, logs, cancel")
        print(f"  • UV jobs: ✓ run")
        print(f"  • Scheduled jobs: ✓ create, list, inspect, suspend, resume, delete")
        print()

    except Exception as e:
        print_error(f"Test failed with exception: {str(e)}")
        import traceback
        traceback.print_exc()

        # Attempt cleanup
        print(f"\n{YELLOW}Attempting cleanup...{RESET}")
        for job_id in job_ids:
            try:
                await test_cancel_job(tool, job_id)
            except:
                pass

        for scheduled_job_id in scheduled_job_ids:
            try:
                await test_delete_scheduled_job(tool, scheduled_job_id)
            except:
                pass

        sys.exit(1)


if __name__ == "__main__":
    asyncio.run(main())
tests/tools/__init__.py
DELETED
@@ -1 +0,0 @@
-"""Tests for agent tools"""
tests/tools/hf/__init__.py
DELETED
@@ -1 +0,0 @@
-"""Tests for HF tools"""
tests/tools/hf/jobs/__init__.py
DELETED
@@ -1 +0,0 @@
-"""Tests for HF Jobs tool"""
tests/tools/hf/jobs/test_jobs_tool.py
DELETED
@@ -1,252 +0,0 @@
"""
Tests for HF Jobs Tool

Tests the jobs tool implementation
"""
import pytest
from unittest.mock import AsyncMock, MagicMock, patch
from agent.tools.hf.jobs.jobs_tool import HfJobsTool, hf_jobs_handler


@pytest.mark.asyncio
async def test_show_help():
    """Test that help message is shown when no operation specified"""
    tool = HfJobsTool()
    result = await tool.execute({})

    assert "HuggingFace Jobs API" in result["formatted"]
    assert "Available Commands" in result["formatted"]
    assert result["totalResults"] == 1
    assert not result.get("isError", False)


@pytest.mark.asyncio
async def test_show_operation_help():
    """Test operation-specific help"""
    tool = HfJobsTool()
    result = await tool.execute({"operation": "run", "args": {"help": True}})

    assert "Help for operation" in result["formatted"]
    assert result["totalResults"] == 1


@pytest.mark.asyncio
async def test_invalid_operation():
    """Test invalid operation handling"""
    tool = HfJobsTool()
    result = await tool.execute({"operation": "invalid_op"})

    assert result.get("isError") == True
    assert "Unknown operation" in result["formatted"]


@pytest.mark.asyncio
async def test_run_job_missing_command():
    """Test run job with missing required parameter"""
    tool = HfJobsTool()
    result = await tool.execute({
        "operation": "run",
        "args": {"image": "python:3.12"}
    })

    assert result.get("isError") == True
    assert "command parameter is required" in result["formatted"]


@pytest.mark.asyncio
async def test_list_jobs_mock():
    """Test list jobs with mock API"""
    tool = HfJobsTool()

    # Mock the API client
    with patch.object(tool.client, 'list_jobs', new_callable=AsyncMock) as mock_list:
        mock_list.return_value = [
            {
                'id': 'test-job-1',
                'status': {'stage': 'RUNNING'},
                'command': ['echo', 'test'],
                'createdAt': '2024-01-01T00:00:00Z',
                'owner': {'name': 'test-user'}
            },
            {
                'id': 'test-job-2',
                'status': {'stage': 'COMPLETED'},
                'command': ['python', 'script.py'],
                'createdAt': '2024-01-01T01:00:00Z',
                'owner': {'name': 'test-user'}
            }
        ]

        # Test listing only running jobs (default)
        result = await tool.execute({"operation": "ps"})

        assert not result.get("isError", False)
        assert "test-job-1" in result["formatted"]
        assert "test-job-2" not in result["formatted"]  # COMPLETED jobs filtered out
        assert result["totalResults"] == 2
        assert result["resultsShared"] == 1

        # Test listing all jobs
        result = await tool.execute({"operation": "ps", "args": {"all": True}})

        assert not result.get("isError", False)
        assert "test-job-1" in result["formatted"]
        assert "test-job-2" in result["formatted"]
        assert result["totalResults"] == 2
        assert result["resultsShared"] == 2


@pytest.mark.asyncio
async def test_inspect_job_mock():
    """Test inspect job with mock API"""
    tool = HfJobsTool()

    with patch.object(tool.client, 'get_job', new_callable=AsyncMock) as mock_get:
        mock_get.return_value = {
            'id': 'test-job-1',
            'status': {'stage': 'RUNNING'},
            'command': ['echo', 'test'],
            'createdAt': '2024-01-01T00:00:00Z',
            'owner': {'name': 'test-user'},
            'flavor': 'cpu-basic'
        }

        result = await tool.execute({
            "operation": "inspect",
            "args": {"job_id": "test-job-1"}
        })

        assert not result.get("isError", False)
        assert "test-job-1" in result["formatted"]
        assert "Job Details" in result["formatted"]
        mock_get.assert_called_once()


@pytest.mark.asyncio
async def test_cancel_job_mock():
    """Test cancel job with mock API"""
    tool = HfJobsTool()

    with patch.object(tool.client, 'cancel_job', new_callable=AsyncMock) as mock_cancel:
        mock_cancel.return_value = None

        result = await tool.execute({
            "operation": "cancel",
            "args": {"job_id": "test-job-1"}
        })

        assert not result.get("isError", False)
        assert "cancelled" in result["formatted"]
        assert "test-job-1" in result["formatted"]
        mock_cancel.assert_called_once()


@pytest.mark.asyncio
async def test_handler():
    """Test the handler function"""
    with patch('agent.tools.hf.jobs.jobs_tool.HfJobsTool') as MockTool:
        mock_tool_instance = MockTool.return_value
        mock_tool_instance.execute = AsyncMock(return_value={
            "formatted": "Test output",
            "totalResults": 1,
            "resultsShared": 1,
            "isError": False
        })

        output, success = await hf_jobs_handler({"operation": "ps"})

        assert success == True
        assert "Test output" in output


@pytest.mark.asyncio
async def test_handler_error():
    """Test handler with error"""
    with patch('agent.tools.hf.jobs.jobs_tool.HfJobsTool') as MockTool:
        MockTool.side_effect = Exception("Test error")

        output, success = await hf_jobs_handler({})

        assert success == False
        assert "Error" in output


@pytest.mark.asyncio
async def test_scheduled_jobs_mock():
    """Test scheduled jobs operations with mock API"""
    tool = HfJobsTool()

    # Test list scheduled jobs
    with patch.object(tool.client, 'list_scheduled_jobs', new_callable=AsyncMock) as mock_list:
        mock_list.return_value = [
            {
                'id': 'sched-job-1',
                'schedule': '@daily',
                'suspend': False,
                'jobSpec': {
                    'command': ['python', 'backup.py'],
                    'dockerImage': 'python:3.12'
                },
                'nextRun': '2024-01-02T00:00:00Z'
            }
        ]

        result = await tool.execute({"operation": "scheduled ps"})

        assert not result.get("isError", False)
        assert "sched-job-1" in result["formatted"]
        assert "Scheduled Jobs" in result["formatted"]


def test_job_utils():
    """Test job utility functions"""
    from agent.tools.hf.jobs.job_utils import parse_timeout, parse_image_source, parse_command

    # Test timeout parsing
    assert parse_timeout("5m") == 300
    assert parse_timeout("2h") == 7200
    assert parse_timeout("30s") == 30
    assert parse_timeout("1d") == 86400

    # Test image source parsing
    result = parse_image_source("python:3.12")
    assert result["dockerImage"] == "python:3.12"
    assert result["spaceId"] is None

    result = parse_image_source("https://huggingface.co/spaces/user/space")
    assert result["dockerImage"] is None
    assert result["spaceId"] == "user/space"

    # Test command parsing
    result = parse_command(["python", "script.py"])
    assert result["command"] == ["python", "script.py"]

    result = parse_command("python script.py")
    assert result["command"] == ["python", "script.py"]


def test_uv_utils():
    """Test UV utility functions"""
    from agent.tools.hf.jobs.uv_utils import build_uv_command, resolve_uv_command

    # Test build UV command
    command = build_uv_command("script.py", {})
    assert command == ["uv", "run", "script.py"]

    command = build_uv_command("script.py", {
        "with_deps": ["requests", "numpy"],
        "python": "3.12"
    })
    assert "uv" in command
    assert "run" in command
    assert "--with" in command
    assert "requests" in command
    assert "-p" in command
    assert "3.12" in command

    # Test resolve UV command
    command = resolve_uv_command({"script": "https://example.com/script.py"})
    assert "https://example.com/script.py" in command

    command = resolve_uv_command({"script": "print('hello')"})
    assert command == ["uv", "run", "print('hello')"]
tests/unit/__init__.py
ADDED
File without changes
tests/unit/tools/__init__.py
ADDED
File without changes
tests/unit/tools/test_jobs_tool.py
ADDED
@@ -0,0 +1,454 @@
+"""
+Tests for HF Jobs Tool
+
+Tests the refactored jobs tool implementation using huggingface-hub library
+"""
+
+from unittest.mock import AsyncMock, patch
+
+import pytest
+
+from agent.tools.jobs_tool import HfJobsTool, hf_jobs_handler
+
+
+def create_mock_job_info(
+    job_id="test-job-1",
+    stage="RUNNING",
+    command=None,
+    docker_image="python:3.12",
+):
+    """Create a mock JobInfo object"""
+    from huggingface_hub._jobs_api import JobInfo
+
+    if command is None:
+        command = ["echo", "test"]
+
+    return JobInfo(
+        id=job_id,
+        created_at="2024-01-01T00:00:00.000000Z",
+        docker_image=docker_image,
+        space_id=None,
+        command=command,
+        arguments=[],
+        environment={},
+        secrets={},
+        flavor="cpu-basic",
+        status={"stage": stage, "message": None},
+        owner={"id": "123", "name": "test-user", "type": "user"},
+        endpoint="https://huggingface.co",
+        url=f"https://huggingface.co/jobs/test-user/{job_id}",
+    )
+
+
+def create_mock_scheduled_job_info(
+    job_id="sched-job-1",
+    schedule="@daily",
+    suspend=False,
+):
+    """Create a mock ScheduledJobInfo object"""
+    from huggingface_hub._jobs_api import ScheduledJobInfo
+
+    return ScheduledJobInfo(
+        id=job_id,
+        created_at="2024-01-01T00:00:00.000000Z",
+        job_spec={
+            "docker_image": "python:3.12",
+            "space_id": None,
+            "command": ["python", "backup.py"],
+            "arguments": [],
+            "environment": {},
+            "secrets": {},
+            "flavor": "cpu-basic",
+            "timeout": 1800,
+            "tags": None,
+            "arch": None,
+        },
+        schedule=schedule,
+        suspend=suspend,
+        concurrency=False,
+        status={
+            "last_job": None,
+            "next_job_run_at": "2024-01-02T00:00:00.000000Z",
+        },
+        owner={"id": "123", "name": "test-user", "type": "user"},
+    )
+
+
+@pytest.mark.asyncio
+async def test_show_help():
+    """Test that help message is shown when no operation specified"""
+    tool = HfJobsTool()
+    result = await tool.execute({})
+
+    assert "HuggingFace Jobs API" in result["formatted"]
+    assert "Available Commands" in result["formatted"]
+    assert result["totalResults"] == 1
+    assert not result.get("isError", False)
+
+
+@pytest.mark.asyncio
+async def test_show_operation_help():
+    """Test operation-specific help"""
+    tool = HfJobsTool()
+    result = await tool.execute({"operation": "run", "args": {"help": True}})
+
+    assert "Help for operation" in result["formatted"]
+    assert result["totalResults"] == 1
+
+
+@pytest.mark.asyncio
+async def test_invalid_operation():
+    """Test invalid operation handling"""
+    tool = HfJobsTool()
+    result = await tool.execute({"operation": "invalid_op"})
+
+    assert result.get("isError") == True
+    assert "Unknown operation" in result["formatted"]
+
+
+@pytest.mark.asyncio
+async def test_run_job_missing_command():
+    """Test run job with missing required parameter"""
+    tool = HfJobsTool()
+
+    # Mock the HfApi.run_job to raise an error
+    with patch.object(tool.api, "run_job") as mock_run:
+        mock_run.side_effect = Exception("command parameter is required")
+
+        result = await tool.execute(
+            {"operation": "run", "args": {"image": "python:3.12"}}
+        )
+
+        assert result.get("isError") == True
+
+
+@pytest.mark.asyncio
+async def test_list_jobs_mock():
+    """Test list jobs with mock API"""
+    tool = HfJobsTool()
+
+    # Create mock job objects
+    running_job = create_mock_job_info("test-job-1", "RUNNING")
+    completed_job = create_mock_job_info(
+        "test-job-2", "COMPLETED", ["python", "script.py"]
+    )
+
+    # Mock the HfApi.list_jobs method
+    with patch.object(tool.api, "list_jobs") as mock_list:
+        mock_list.return_value = [running_job, completed_job]
+
+        # Test listing only running jobs (default)
+        result = await tool.execute({"operation": "ps"})
+
+        assert not result.get("isError", False)
+        assert "test-job-1" in result["formatted"]
+        assert "test-job-2" not in result["formatted"]  # COMPLETED jobs filtered out
+        assert result["totalResults"] == 1
+        assert result["resultsShared"] == 1
+
+        # Test listing all jobs
+        result = await tool.execute({"operation": "ps", "args": {"all": True}})
+
+        assert not result.get("isError", False)
+        assert "test-job-1" in result["formatted"]
+        assert "test-job-2" in result["formatted"]
+        assert result["totalResults"] == 2
+        assert result["resultsShared"] == 2
+
+
+@pytest.mark.asyncio
+async def test_inspect_job_mock():
+    """Test inspect job with mock API"""
+    tool = HfJobsTool()
+
+    mock_job = create_mock_job_info("test-job-1", "RUNNING")
+
+    with patch.object(tool.api, "inspect_job") as mock_inspect:
+        mock_inspect.return_value = mock_job
+
+        result = await tool.execute(
+            {"operation": "inspect", "args": {"job_id": "test-job-1"}}
+        )
+
+        assert not result.get("isError", False)
+        assert "test-job-1" in result["formatted"]
+        assert "Job Details" in result["formatted"]
+        mock_inspect.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_cancel_job_mock():
+    """Test cancel job with mock API"""
+    tool = HfJobsTool()
+
+    with patch.object(tool.api, "cancel_job") as mock_cancel:
+        mock_cancel.return_value = None
+
+        result = await tool.execute(
+            {"operation": "cancel", "args": {"job_id": "test-job-1"}}
+        )
+
+        assert not result.get("isError", False)
+        assert "cancelled" in result["formatted"]
+        assert "test-job-1" in result["formatted"]
+        mock_cancel.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_run_job_mock():
+    """Test run job with mock API"""
+    tool = HfJobsTool()
+
+    mock_job = create_mock_job_info("new-job-123", "RUNNING")
+
+    with patch.object(tool.api, "run_job") as mock_run:
+        mock_run.return_value = mock_job
+
+        result = await tool.execute(
+            {
+                "operation": "run",
+                "args": {
+                    "image": "python:3.12",
+                    "command": ["python", "-c", "print('test')"],
+                    "flavor": "cpu-basic",
+                    "detach": True,
+                },
+            }
+        )
+
+        assert not result.get("isError", False)
+        assert "new-job-123" in result["formatted"]
+        assert "Job started" in result["formatted"]
+        mock_run.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_run_uv_job_mock():
+    """Test run UV job with mock API"""
+    tool = HfJobsTool()
+
+    mock_job = create_mock_job_info("uv-job-456", "RUNNING")
+
+    with patch.object(tool.api, "run_uv_job") as mock_run:
+        mock_run.return_value = mock_job
+
+        result = await tool.execute(
+            {
+                "operation": "uv",
+                "args": {
+                    "script": "print('Hello UV')",
+                    "flavor": "cpu-basic",
+                },
+            }
+        )
+
+        assert not result.get("isError", False)
+        assert "uv-job-456" in result["formatted"]
+        assert "UV Job started" in result["formatted"]
+        mock_run.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_get_logs_mock():
+    """Test get logs with mock API"""
+    tool = HfJobsTool()
+
+    # Mock fetch_job_logs to return a generator
+    def log_generator():
+        yield "Log line 1"
+        yield "Log line 2"
+        yield "Hello from HF Jobs!"
+
+    with patch.object(tool.api, "fetch_job_logs") as mock_logs:
+        mock_logs.return_value = log_generator()
+
+        result = await tool.execute(
+            {"operation": "logs", "args": {"job_id": "test-job-1"}}
+        )
+
+        assert not result.get("isError", False)
+        assert "Log line 1" in result["formatted"]
+        assert "Hello from HF Jobs!" in result["formatted"]
+
+
+@pytest.mark.asyncio
+async def test_handler():
+    """Test the handler function"""
+    with patch("agent.tools.jobs_tool.HfJobsTool") as MockTool:
+        mock_tool_instance = MockTool.return_value
+        mock_tool_instance.execute = AsyncMock(
+            return_value={
+                "formatted": "Test output",
+                "totalResults": 1,
+                "resultsShared": 1,
+                "isError": False,
+            }
+        )
+
+        output, success = await hf_jobs_handler({"operation": "ps"})
+
+        assert success == True
+        assert "Test output" in output
+
+
+@pytest.mark.asyncio
+async def test_handler_error():
+    """Test handler with error"""
+    with patch("agent.tools.jobs_tool.HfJobsTool") as MockTool:
+        MockTool.side_effect = Exception("Test error")
+
+        output, success = await hf_jobs_handler({})
+
+        assert success == False
+        assert "Error" in output
+
+
+@pytest.mark.asyncio
+async def test_scheduled_jobs_mock():
+    """Test scheduled jobs operations with mock API"""
+    tool = HfJobsTool()
+
+    mock_scheduled_job = create_mock_scheduled_job_info()
+
+    # Test list scheduled jobs
+    with patch.object(tool.api, "list_scheduled_jobs") as mock_list:
+        mock_list.return_value = [mock_scheduled_job]
+
+        result = await tool.execute({"operation": "scheduled ps"})
+
+        assert not result.get("isError", False)
+        assert "sched-job-1" in result["formatted"]
+        assert "Scheduled Jobs" in result["formatted"]
+
+
+@pytest.mark.asyncio
+async def test_create_scheduled_job_mock():
+    """Test create scheduled job with mock API"""
+    tool = HfJobsTool()
+
+    mock_scheduled_job = create_mock_scheduled_job_info()
+
+    with patch.object(tool.api, "create_scheduled_job") as mock_create:
+        mock_create.return_value = mock_scheduled_job
+
+        result = await tool.execute(
+            {
+                "operation": "scheduled run",
+                "args": {
+                    "image": "python:3.12",
+                    "command": ["python", "backup.py"],
+                    "schedule": "@daily",
+                    "flavor": "cpu-basic",
+                },
+            }
+        )
+
+        assert not result.get("isError", False)
+        assert "sched-job-1" in result["formatted"]
+        assert "Scheduled job created" in result["formatted"]
+        mock_create.assert_called_once()
+
+
+@pytest.mark.asyncio
+async def test_inspect_scheduled_job_mock():
+    """Test inspect scheduled job with mock API"""
+    tool = HfJobsTool()
+
+    mock_scheduled_job = create_mock_scheduled_job_info()
+
+    with patch.object(tool.api, "inspect_scheduled_job") as mock_inspect:
+        mock_inspect.return_value = mock_scheduled_job
+
+        result = await tool.execute(
+            {
+                "operation": "scheduled inspect",
+                "args": {"scheduled_job_id": "sched-job-1"},
+            }
+        )
+
+        assert not result.get("isError", False)
+        assert "sched-job-1" in result["formatted"]
+        assert "Scheduled Job Details" in result["formatted"]
+
+
+@pytest.mark.asyncio
+async def test_suspend_scheduled_job_mock():
+    """Test suspend scheduled job with mock API"""
+    tool = HfJobsTool()
+
+    with patch.object(tool.api, "suspend_scheduled_job") as mock_suspend:
+        mock_suspend.return_value = None
+
+        result = await tool.execute(
+            {
+                "operation": "scheduled suspend",
+                "args": {"scheduled_job_id": "sched-job-1"},
+            }
+        )
+
+        assert not result.get("isError", False)
+        assert "suspended" in result["formatted"]
+        assert "sched-job-1" in result["formatted"]
+
+
+@pytest.mark.asyncio
+async def test_resume_scheduled_job_mock():
+    """Test resume scheduled job with mock API"""
+    tool = HfJobsTool()
+
+    with patch.object(tool.api, "resume_scheduled_job") as mock_resume:
+        mock_resume.return_value = None
+
+        result = await tool.execute(
+            {
+                "operation": "scheduled resume",
+                "args": {"scheduled_job_id": "sched-job-1"},
+            }
+        )
+
+        assert not result.get("isError", False)
+        assert "resumed" in result["formatted"]
+        assert "sched-job-1" in result["formatted"]
+
+
+@pytest.mark.asyncio
+async def test_delete_scheduled_job_mock():
+    """Test delete scheduled job with mock API"""
+    tool = HfJobsTool()
+
+    with patch.object(tool.api, "delete_scheduled_job") as mock_delete:
+        mock_delete.return_value = None
+
+        result = await tool.execute(
+            {
+                "operation": "scheduled delete",
+                "args": {"scheduled_job_id": "sched-job-1"},
+            }
+        )
+
+        assert not result.get("isError", False)
+        assert "deleted" in result["formatted"]
+        assert "sched-job-1" in result["formatted"]
+
+
+@pytest.mark.asyncio
+async def test_list_jobs_with_status_filter():
+    """Test list jobs with status filter"""
+    tool = HfJobsTool()
+
+    running_job = create_mock_job_info("job-1", "RUNNING")
+    completed_job = create_mock_job_info("job-2", "COMPLETED")
+    error_job = create_mock_job_info("job-3", "ERROR")
+
+    with patch.object(tool.api, "list_jobs") as mock_list:
+        mock_list.return_value = [running_job, completed_job, error_job]
+
+        # Filter by status
+        result = await tool.execute(
+            {"operation": "ps", "args": {"all": True, "status": "ERROR"}}
+        )
+
+        assert not result.get("isError", False)
+        assert "job-3" in result["formatted"]
+        assert "job-1" not in result["formatted"]
+        assert result["resultsShared"] == 1
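Worth noting: every test above exercises the tool purely through mocks, so the only contract the suite depends on is `HfJobsTool.execute(payload) -> result dict` and `hf_jobs_handler(payload) -> (output, success)`. A minimal sketch of driving the handler outside pytest, assuming the same import path as the tests and HF credentials in the environment:

import asyncio

from agent.tools.jobs_tool import hf_jobs_handler


async def main() -> None:
    # "ps" lists currently running jobs, mirroring test_handler above.
    output, success = await hf_jobs_handler({"operation": "ps"})
    print(f"success={success}")
    print(output)


asyncio.run(main())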
uv.lock CHANGED
@@ -893,6 +893,7 @@ dependencies = [
     { name = "numpy" },
     { name = "pandas" },
     { name = "pydantic" },
+    { name = "pytest" },
     { name = "python-dotenv" },
     { name = "requests" },
     { name = "tenacity" },
@@ -911,6 +912,7 @@ requires-dist = [
     { name = "numpy", specifier = ">=1.24.0" },
     { name = "pandas", specifier = ">=2.3.3" },
     { name = "pydantic", specifier = ">=2.12.3" },
+    { name = "pytest", specifier = ">=9.0.2" },
     { name = "python-dotenv", specifier = ">=1.2.1" },
     { name = "requests", specifier = ">=2.32.5" },
     { name = "tenacity", specifier = ">=8.0.0" },
@@ -1098,6 +1100,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/59/91/aa6bde563e0085a02a435aa99b49ef75b0a4b062635e606dab23ce18d720/inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2", size = 9454, upload-time = "2020-08-22T08:16:27.816Z" },
 ]
 
+[[package]]
+name = "iniconfig"
+version = "2.3.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" },
+]
+
 [[package]]
 name = "inspect-ai"
 version = "0.3.149"
@@ -2703,6 +2714,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/73/cb/ac7874b3e5d58441674fb70742e6c374b28b0c7cb988d37d991cde47166c/platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3", size = 18651, upload-time = "2025-10-08T17:44:47.223Z" },
 ]
 
+[[package]]
+name = "pluggy"
+version = "1.6.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
+]
+
 [[package]]
 name = "ply"
 version = "3.11"
@@ -3064,6 +3084,22 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/df/80/fc9d01d5ed37ba4c42ca2b55b4339ae6e200b456be3a1aaddf4a9fa99b8c/pyperclip-1.11.0-py3-none-any.whl", hash = "sha256:299403e9ff44581cb9ba2ffeed69c7aa96a008622ad0c46cb575ca75b5b84273", size = 11063, upload-time = "2025-09-26T14:40:36.069Z" },
 ]
 
+[[package]]
+name = "pytest"
+version = "9.0.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "colorama", marker = "sys_platform == 'win32'" },
+    { name = "iniconfig" },
+    { name = "packaging" },
+    { name = "pluggy" },
+    { name = "pygments" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" },
+]
+
 [[package]]
 name = "python-dateutil"
 version = "2.9.0.post0"
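The uv.lock hunks above are the mechanical fallout of adding pytest to the project (see the matching one-line pyproject.toml change in this commit): pytest itself plus its transitive dependencies iniconfig and pluggy enter the lock. A quick sanity check after `uv sync`, a sketch using only the standard library:

from importlib.metadata import version

# These resolved versions should match the entries recorded in uv.lock.
for pkg in ("pytest", "pluggy", "iniconfig"):
    print(pkg, version(pkg))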