Spaces:
Running
Running
Commit
·
4d8a789
1
Parent(s):
ea67d9f
custom hf jobs tool impl
Browse files- agent/config_local_mcp.json +16 -0
- agent/core/tools.py +7 -0
- agent/tools/hf/__init__.py +7 -0
- agent/tools/hf/base.py +84 -0
- agent/tools/hf/jobs/__init__.py +6 -0
- agent/tools/hf/jobs/api_client.py +156 -0
- agent/tools/hf/jobs/job_utils.py +130 -0
- agent/tools/hf/jobs/jobs_tool.py +652 -0
- agent/tools/hf/jobs/uv_utils.py +66 -0
- agent/tools/hf/types.py +14 -0
- agent/tools/hf/utilities.py +133 -0
- tests/tools/__init__.py +1 -0
- tests/tools/hf/__init__.py +1 -0
- tests/tools/hf/jobs/__init__.py +1 -0
- tests/tools/hf/jobs/test_jobs_tool.py +252 -0
agent/config_local_mcp.json
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"model_name": "anthropic/claude-sonnet-4-5-20250929",
|
| 3 |
+
"tools": [],
|
| 4 |
+
"system_prompt_path": "",
|
| 5 |
+
"mcpServers": {
|
| 6 |
+
"hf-mcp-server": {
|
| 7 |
+
"command": "node",
|
| 8 |
+
"args": [
|
| 9 |
+
"/Users/akseljoonas/Documents/hf-agent/hf-mcp-server/packages/app/dist/server/stdio.js"
|
| 10 |
+
],
|
| 11 |
+
"env": {
|
| 12 |
+
"DEFAULT_HF_TOKEN": "REDACTED_SET_VIA_ENVIRONMENT"
|
| 13 |
+
}
|
| 14 |
+
}
|
| 15 |
+
}
|
| 16 |
+
}
|
agent/core/tools.py
CHANGED
|
@@ -13,6 +13,7 @@ from lmnr import observe
|
|
| 13 |
from mcp.types import EmbeddedResource, ImageContent, TextContent
|
| 14 |
|
| 15 |
from agent.config import MCPServerConfig
|
|
|
|
| 16 |
|
| 17 |
# Suppress aiohttp deprecation warning
|
| 18 |
warnings.filterwarnings(
|
|
@@ -262,4 +263,10 @@ def create_builtin_tools() -> list[ToolSpec]:
|
|
| 262 |
},
|
| 263 |
handler=write_file_handler,
|
| 264 |
),
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 265 |
]
|
|
|
|
| 13 |
from mcp.types import EmbeddedResource, ImageContent, TextContent
|
| 14 |
|
| 15 |
from agent.config import MCPServerConfig
|
| 16 |
+
from agent.tools.hf.jobs import HF_JOBS_TOOL_SPEC, hf_jobs_handler
|
| 17 |
|
| 18 |
# Suppress aiohttp deprecation warning
|
| 19 |
warnings.filterwarnings(
|
|
|
|
| 263 |
},
|
| 264 |
handler=write_file_handler,
|
| 265 |
),
|
| 266 |
+
ToolSpec(
|
| 267 |
+
name=HF_JOBS_TOOL_SPEC["name"],
|
| 268 |
+
description=HF_JOBS_TOOL_SPEC["description"],
|
| 269 |
+
parameters=HF_JOBS_TOOL_SPEC["parameters"],
|
| 270 |
+
handler=hf_jobs_handler,
|
| 271 |
+
),
|
| 272 |
]
|
agent/tools/hf/__init__.py
ADDED
|
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Hugging Face tools for the agent
|
| 3 |
+
"""
|
| 4 |
+
from agent.tools.hf.types import ToolResult
|
| 5 |
+
from agent.tools.hf.base import HfApiCall, HfApiError
|
| 6 |
+
|
| 7 |
+
__all__ = ['ToolResult', 'HfApiCall', 'HfApiError']
|
agent/tools/hf/base.py
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Base API client for Hugging Face API
|
| 3 |
+
|
| 4 |
+
Ported from: hf-mcp-server/packages/mcp/src/hf-api-call.ts
|
| 5 |
+
"""
|
| 6 |
+
import os
|
| 7 |
+
from typing import Optional, Dict, Any, TypeVar, Generic
|
| 8 |
+
import httpx
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
TResponse = TypeVar('TResponse')
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class HfApiError(Exception):
    """Raised when a Hugging Face API request fails.

    Carries the HTTP status code, reason phrase, and raw response body
    alongside the human-readable message so callers can surface
    server-provided error details.
    """

    def __init__(self, message: str, status: Optional[int] = None,
                 status_text: Optional[str] = None,
                 response_body: Optional[str] = None):
        super().__init__(message)
        # Expose every detail as an attribute for structured handling.
        self.message = message
        self.status = status
        self.status_text = status_text
        self.response_body = response_body
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class HfApiCall(Generic[TResponse]):
    """Base class for making authenticated API calls to Hugging Face.

    Subclasses configure a base ``api_url``; requests are authenticated
    with a bearer token taken from the constructor or the ``HF_TOKEN``
    environment variable.
    """

    # Methods this client accepts; anything else raises ValueError.
    _SUPPORTED_METHODS = frozenset({"GET", "POST", "PUT", "DELETE"})
    # Methods that may carry a JSON request body.
    _JSON_METHODS = frozenset({"POST", "PUT"})

    def __init__(
        self,
        api_url: str,
        hf_token: Optional[str] = None,
        api_timeout: Optional[float] = None
    ):
        """
        Args:
            api_url: Base URL of the HF API (e.g. "https://huggingface.co/api").
            hf_token: Auth token; falls back to the HF_TOKEN env var.
            api_timeout: Request timeout in seconds; falls back to the
                HF_API_TIMEOUT env var, then 12.5 seconds.
        """
        self.api_url = api_url
        self.hf_token = hf_token or os.getenv('HF_TOKEN')
        self.api_timeout = api_timeout or float(os.getenv('HF_API_TIMEOUT', '12.5'))

    async def fetch_from_api(
        self,
        url: str,
        method: str = "GET",
        json: Optional[Dict[str, Any]] = None,
        **kwargs
    ) -> Optional[Any]:
        """Fetch data from the API with auth headers and error handling.

        Returns the decoded JSON body, or None for empty responses
        (DELETE/cancel endpoints often return no content).

        Raises:
            ValueError: for HTTP methods outside GET/POST/PUT/DELETE.
            HfApiError: when the server responds with a non-2xx status.
        """
        # Validate once up front instead of branching per method below.
        if method not in self._SUPPORTED_METHODS:
            raise ValueError(f"Unsupported HTTP method: {method}")

        headers = {
            "Accept": "application/json",
            **kwargs.pop("headers", {})
        }
        if self.hf_token:
            headers["Authorization"] = f"Bearer {self.hf_token}"

        async with httpx.AsyncClient(timeout=self.api_timeout) as client:
            # Only POST/PUT carry a JSON body; GET/DELETE are sent without one,
            # matching the previous per-method branches.
            if method in self._JSON_METHODS:
                response = await client.request(method, url, headers=headers, json=json, **kwargs)
            else:
                response = await client.request(method, url, headers=headers, **kwargs)

        if not response.is_success:
            raise HfApiError(
                message=f"API request failed: {response.status_code} {response.reason_phrase}",
                status=response.status_code,
                status_text=response.reason_phrase,
                response_body=response.text
            )

        # Empty body (common for DELETE endpoints) -> no payload.
        if not response.text:
            return None

        return response.json()
agent/tools/hf/jobs/__init__.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Hugging Face Jobs tool
|
| 3 |
+
"""
|
| 4 |
+
from agent.tools.hf.jobs.jobs_tool import HfJobsTool, HF_JOBS_TOOL_SPEC, hf_jobs_handler
|
| 5 |
+
|
| 6 |
+
__all__ = ['HfJobsTool', 'HF_JOBS_TOOL_SPEC', 'hf_jobs_handler']
|
agent/tools/hf/jobs/api_client.py
ADDED
|
@@ -0,0 +1,156 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Jobs API Client
|
| 3 |
+
|
| 4 |
+
Ported from: hf-mcp-server/packages/mcp/src/jobs/api-client.ts
|
| 5 |
+
"""
|
| 6 |
+
from typing import Optional, Dict, Any, List
|
| 7 |
+
from agent.tools.hf.base import HfApiCall
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class JobsApiClient(HfApiCall):
    """API client for the HuggingFace Jobs API.

    Covers both on-demand jobs (/api/jobs) and scheduled jobs
    (/api/scheduled-jobs). The target namespace (user or org) is taken
    from the constructor when given, otherwise resolved lazily via
    /api/whoami-v2 and cached.
    """

    def __init__(self, hf_token: Optional[str] = None, namespace: Optional[str] = None):
        super().__init__('https://huggingface.co/api', hf_token)
        # Pre-seeding the cache skips the whoami-v2 round trip.
        self.namespace_cache = namespace

    def _url(self, *segments: str) -> str:
        """Join path segments onto the configured API base URL."""
        return '/'.join((self.api_url, *segments))

    async def get_namespace(self, namespace: Optional[str] = None) -> str:
        """Resolve the namespace (username or org) to operate on.

        Precedence: explicit argument > cached value > /api/whoami-v2.
        """
        if namespace:
            return namespace

        if self.namespace_cache:
            return self.namespace_cache

        # Fall back to the whoami endpoint for the authenticated user.
        whoami = await self.fetch_from_api(self._url('whoami-v2'))
        self.namespace_cache = whoami['name']
        return self.namespace_cache

    async def run_job(self, job_spec: Dict[str, Any], namespace: Optional[str] = None) -> Dict[str, Any]:
        """Start a job. POST /api/jobs/{namespace}"""
        ns = await self.get_namespace(namespace)
        return await self.fetch_from_api(self._url('jobs', ns), method='POST', json=job_spec)

    async def list_jobs(self, namespace: Optional[str] = None) -> List[Dict[str, Any]]:
        """List all jobs for a namespace. GET /api/jobs/{namespace}"""
        ns = await self.get_namespace(namespace)
        return await self.fetch_from_api(self._url('jobs', ns))

    async def get_job(self, job_id: str, namespace: Optional[str] = None) -> Dict[str, Any]:
        """Get detailed information about a specific job. GET /api/jobs/{namespace}/{jobId}"""
        ns = await self.get_namespace(namespace)
        return await self.fetch_from_api(self._url('jobs', ns, job_id))

    async def cancel_job(self, job_id: str, namespace: Optional[str] = None) -> None:
        """Cancel a running job. POST /api/jobs/{namespace}/{jobId}/cancel"""
        ns = await self.get_namespace(namespace)
        await self.fetch_from_api(self._url('jobs', ns, job_id, 'cancel'), method='POST')

    def get_logs_url(self, job_id: str, namespace: str) -> str:
        """Return the logs URL for a job (used for SSE streaming)."""
        return self._url('jobs', namespace, job_id, 'logs')

    async def create_scheduled_job(
        self,
        spec: Dict[str, Any],
        namespace: Optional[str] = None
    ) -> Dict[str, Any]:
        """Create a scheduled job. POST /api/scheduled-jobs/{namespace}"""
        ns = await self.get_namespace(namespace)
        return await self.fetch_from_api(self._url('scheduled-jobs', ns), method='POST', json=spec)

    async def list_scheduled_jobs(self, namespace: Optional[str] = None) -> List[Dict[str, Any]]:
        """List all scheduled jobs. GET /api/scheduled-jobs/{namespace}"""
        ns = await self.get_namespace(namespace)
        return await self.fetch_from_api(self._url('scheduled-jobs', ns))

    async def get_scheduled_job(
        self,
        scheduled_job_id: str,
        namespace: Optional[str] = None
    ) -> Dict[str, Any]:
        """Get details of a scheduled job. GET /api/scheduled-jobs/{namespace}/{scheduledJobId}"""
        ns = await self.get_namespace(namespace)
        return await self.fetch_from_api(self._url('scheduled-jobs', ns, scheduled_job_id))

    async def delete_scheduled_job(
        self,
        scheduled_job_id: str,
        namespace: Optional[str] = None
    ) -> None:
        """Delete a scheduled job. DELETE /api/scheduled-jobs/{namespace}/{scheduledJobId}"""
        ns = await self.get_namespace(namespace)
        await self.fetch_from_api(self._url('scheduled-jobs', ns, scheduled_job_id), method='DELETE')

    async def suspend_scheduled_job(
        self,
        scheduled_job_id: str,
        namespace: Optional[str] = None
    ) -> None:
        """Suspend a scheduled job. POST /api/scheduled-jobs/{namespace}/{scheduledJobId}/suspend"""
        ns = await self.get_namespace(namespace)
        await self.fetch_from_api(self._url('scheduled-jobs', ns, scheduled_job_id, 'suspend'), method='POST')

    async def resume_scheduled_job(
        self,
        scheduled_job_id: str,
        namespace: Optional[str] = None
    ) -> None:
        """Resume a suspended scheduled job. POST /api/scheduled-jobs/{namespace}/{scheduledJobId}/resume"""
        ns = await self.get_namespace(namespace)
        await self.fetch_from_api(self._url('scheduled-jobs', ns, scheduled_job_id, 'resume'), method='POST')
agent/tools/hf/jobs/job_utils.py
ADDED
|
@@ -0,0 +1,130 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Job utility functions
|
| 3 |
+
|
| 4 |
+
Ported from: hf-mcp-server/packages/mcp/src/jobs/commands/utils.ts
|
| 5 |
+
"""
|
| 6 |
+
import re
|
| 7 |
+
import shlex
|
| 8 |
+
from typing import Dict, Optional, Any, List, Union
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def parse_timeout(timeout: str) -> int:
    """Convert a timeout string such as "5m", "2h", or "30s" into whole seconds.

    A bare number (e.g. "90") is interpreted as seconds. Raises
    ValueError for any other format.
    """
    suffix_match = re.match(r'^(\d+(?:\.\d+)?)(s|m|h|d)$', timeout)
    if suffix_match is not None:
        amount, unit = suffix_match.groups()
        seconds_per_unit = {'s': 1, 'm': 60, 'h': 3600, 'd': 86400}
        return int(float(amount) * seconds_per_unit[unit])

    # No recognised suffix: accept a plain integer number of seconds.
    try:
        return int(timeout)
    except ValueError:
        raise ValueError(
            f'Invalid timeout format: "{timeout}". Use format like "5m", "2h", "30s", or plain seconds.'
        )
+
|
| 34 |
+
|
| 35 |
+
def parse_image_source(image: str) -> Dict[str, Optional[str]]:
    """Classify *image* as either a Docker image or a HF Space reference.

    Returns a dict with exactly one of 'dockerImage' / 'spaceId' set;
    the other key is None.
    """
    known_space_urls = (
        'https://huggingface.co/spaces/',
        'https://hf.co/spaces/',
        'huggingface.co/spaces/',
        'hf.co/spaces/',
    )

    matched = next((p for p in known_space_urls if image.startswith(p)), None)
    if matched is not None:
        # Strip the URL prefix so only "owner/space-name" remains.
        return {'dockerImage': None, 'spaceId': image[len(matched):]}

    # Anything else is assumed to be a plain Docker image reference.
    return {'dockerImage': image, 'spaceId': None}
+
|
| 54 |
+
|
| 55 |
+
def parse_command(command: Union[str, List[str]]) -> Dict[str, Any]:
    """Normalise a command (string or argv list) into {'command': [...], 'arguments': []}.

    Strings are tokenised with POSIX shell semantics via shlex; lists
    are passed through untouched.
    """
    if isinstance(command, list):
        # Caller already supplied an argv array; nothing to parse.
        return {'command': command, 'arguments': []}

    try:
        tokens = shlex.split(command)
    except ValueError as e:
        # shlex rejects unbalanced quotes and similar shell constructs.
        raise ValueError(
            f'Unsupported shell syntax in command: "{command}". '
            f'Please use an array format for commands with complex shell operators. Error: {e}'
        )

    if not tokens:
        raise ValueError(f'Invalid command: "{command}". Command cannot be empty.')

    return {'command': tokens, 'arguments': []}
+
|
| 78 |
+
|
| 79 |
+
def replace_token_placeholder(value: str, hf_token: Optional[str]) -> str:
    """Substitute a literal $HF_TOKEN / ${HF_TOKEN} placeholder with the real token.

    The value is returned unchanged when no token is configured or the
    value is not exactly a placeholder.
    """
    placeholders = ('$HF_TOKEN', '${HF_TOKEN}')
    if hf_token and value in placeholders:
        return hf_token
    return value


def transform_env_map(
    env_map: Optional[Dict[str, str]],
    hf_token: Optional[str]
) -> Optional[Dict[str, str]]:
    """Apply token-placeholder substitution to every value in *env_map*.

    Empty or missing maps collapse to None so callers can distinguish
    "no environment" from "environment provided".
    """
    if not env_map:
        return None

    transformed: Dict[str, str] = {}
    for name, raw_value in env_map.items():
        transformed[name] = replace_token_placeholder(raw_value, hf_token)
    return transformed
+
|
| 103 |
+
|
| 104 |
+
def create_job_spec(args: Dict[str, Any]) -> Dict[str, Any]:
    """Build a Jobs API JobSpec dict from run-command style arguments.

    Requires 'image' and 'command'; honours optional 'flavor', 'env',
    'secrets', 'timeout', and 'hfToken'. Raises ValueError when a
    required field is missing.
    """
    if not args.get('image'):
        raise ValueError('image parameter is required. Provide a Docker image (e.g., "python:3.12") or Space URL.')
    if not args.get('command'):
        raise ValueError('command parameter is required. Provide a command as string or array.')

    source = parse_image_source(args['image'])
    parsed = parse_command(args['command'])

    # Start from whichever of dockerImage/spaceId is actually set.
    spec: Dict[str, Any] = {key: value for key, value in source.items() if value is not None}
    spec['command'] = parsed['command']
    spec['arguments'] = parsed['arguments']
    spec['flavor'] = args.get('flavor', 'cpu-basic')
    spec['environment'] = transform_env_map(args.get('env'), args.get('hfToken')) or {}
    spec['secrets'] = transform_env_map(args.get('secrets'), args.get('hfToken')) or {}

    # Only attach a timeout when one was requested.
    if args.get('timeout'):
        spec['timeoutSeconds'] = parse_timeout(args['timeout'])

    return spec
agent/tools/hf/jobs/jobs_tool.py
ADDED
|
@@ -0,0 +1,652 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Hugging Face Jobs Tool - Manage compute jobs on Hugging Face
|
| 3 |
+
|
| 4 |
+
Ported from: hf-mcp-server/packages/mcp/src/jobs/jobs-tool.ts
|
| 5 |
+
"""
|
| 6 |
+
import json
|
| 7 |
+
from typing import Optional, Dict, Any, List, Literal
|
| 8 |
+
from agent.tools.hf.types import ToolResult
|
| 9 |
+
from agent.tools.hf.base import HfApiError
|
| 10 |
+
from agent.tools.hf.jobs.api_client import JobsApiClient
|
| 11 |
+
from agent.tools.hf.jobs.job_utils import create_job_spec
|
| 12 |
+
from agent.tools.hf.jobs.uv_utils import resolve_uv_command, UV_DEFAULT_IMAGE
|
| 13 |
+
from agent.tools.hf.utilities import (
|
| 14 |
+
format_jobs_table,
|
| 15 |
+
format_scheduled_jobs_table,
|
| 16 |
+
format_job_details,
|
| 17 |
+
format_scheduled_job_details,
|
| 18 |
+
)
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
# --- Hardware flavors available on HF Jobs ---------------------------------
CPU_FLAVORS = [
    'cpu-basic', 'cpu-upgrade', 'cpu-performance', 'cpu-xl',
]
GPU_FLAVORS = [
    'sprx8', 'zero-a10g', 't4-small', 't4-medium', 'l4x1', 'l4x4',
    'l40sx1', 'l40sx4', 'l40sx8', 'a10g-small', 'a10g-large',
    'a10g-largex2', 'a10g-largex4', 'a100-large', 'h100', 'h100x8',
]
SPECIALIZED_FLAVORS = ['inf2x6']
# Flattened list used for validation and help text.
ALL_FLAVORS = CPU_FLAVORS + GPU_FLAVORS + SPECIALIZED_FLAVORS

# Every operation the tool can dispatch on.
OperationType = Literal[
    "run", "uv", "ps", "logs", "inspect", "cancel",
    "scheduled run", "scheduled uv", "scheduled ps",
    "scheduled inspect", "scheduled delete", "scheduled suspend", "scheduled resume",
]

# How long non-detached runs tail logs before returning.
DEFAULT_LOG_WAIT_SECONDS = 10
+
|
| 41 |
+
|
| 42 |
+
class HfJobsTool:
|
| 43 |
+
"""Tool for managing Hugging Face compute jobs"""
|
| 44 |
+
|
| 45 |
+
def __init__(self, hf_token: Optional[str] = None, namespace: Optional[str] = None):
|
| 46 |
+
self.hf_token = hf_token
|
| 47 |
+
self.client = JobsApiClient(hf_token, namespace)
|
| 48 |
+
|
| 49 |
+
async def execute(self, params: Dict[str, Any]) -> ToolResult:
|
| 50 |
+
"""Execute the specified operation"""
|
| 51 |
+
operation = params.get('operation')
|
| 52 |
+
args = params.get('args', {})
|
| 53 |
+
|
| 54 |
+
# If no operation provided, return usage instructions
|
| 55 |
+
if not operation:
|
| 56 |
+
return self._show_help()
|
| 57 |
+
|
| 58 |
+
# Normalize operation name
|
| 59 |
+
operation = operation.lower()
|
| 60 |
+
|
| 61 |
+
# Check if help is requested
|
| 62 |
+
if args.get('help'):
|
| 63 |
+
return self._show_operation_help(operation)
|
| 64 |
+
|
| 65 |
+
try:
|
| 66 |
+
# Route to appropriate handler
|
| 67 |
+
if operation == "run":
|
| 68 |
+
return await self._run_job(args)
|
| 69 |
+
elif operation == "uv":
|
| 70 |
+
return await self._run_uv_job(args)
|
| 71 |
+
elif operation == "ps":
|
| 72 |
+
return await self._list_jobs(args)
|
| 73 |
+
elif operation == "logs":
|
| 74 |
+
return await self._get_logs(args)
|
| 75 |
+
elif operation == "inspect":
|
| 76 |
+
return await self._inspect_job(args)
|
| 77 |
+
elif operation == "cancel":
|
| 78 |
+
return await self._cancel_job(args)
|
| 79 |
+
elif operation == "scheduled run":
|
| 80 |
+
return await self._scheduled_run(args)
|
| 81 |
+
elif operation == "scheduled uv":
|
| 82 |
+
return await self._scheduled_uv(args)
|
| 83 |
+
elif operation == "scheduled ps":
|
| 84 |
+
return await self._list_scheduled_jobs(args)
|
| 85 |
+
elif operation == "scheduled inspect":
|
| 86 |
+
return await self._inspect_scheduled_job(args)
|
| 87 |
+
elif operation == "scheduled delete":
|
| 88 |
+
return await self._delete_scheduled_job(args)
|
| 89 |
+
elif operation == "scheduled suspend":
|
| 90 |
+
return await self._suspend_scheduled_job(args)
|
| 91 |
+
elif operation == "scheduled resume":
|
| 92 |
+
return await self._resume_scheduled_job(args)
|
| 93 |
+
else:
|
| 94 |
+
return {
|
| 95 |
+
"formatted": f'Unknown operation: "{operation}"\n\n'
|
| 96 |
+
'Available operations:\n'
|
| 97 |
+
'- run, uv, ps, logs, inspect, cancel\n'
|
| 98 |
+
'- scheduled run, scheduled uv, scheduled ps, scheduled inspect, '
|
| 99 |
+
'scheduled delete, scheduled suspend, scheduled resume\n\n'
|
| 100 |
+
'Call this tool with no operation for full usage instructions.',
|
| 101 |
+
"totalResults": 0,
|
| 102 |
+
"resultsShared": 0,
|
| 103 |
+
"isError": True
|
| 104 |
+
}
|
| 105 |
+
|
| 106 |
+
except HfApiError as e:
|
| 107 |
+
error_message = f"API Error: {e.message}"
|
| 108 |
+
if e.response_body:
|
| 109 |
+
try:
|
| 110 |
+
parsed = json.loads(e.response_body)
|
| 111 |
+
formatted_body = json.dumps(parsed, indent=2)
|
| 112 |
+
error_message += f"\n\nServer response:\n{formatted_body}"
|
| 113 |
+
except Exception:
|
| 114 |
+
if len(e.response_body) < 500:
|
| 115 |
+
error_message += f"\n\nServer response: {e.response_body}"
|
| 116 |
+
|
| 117 |
+
return {
|
| 118 |
+
"formatted": error_message,
|
| 119 |
+
"totalResults": 0,
|
| 120 |
+
"resultsShared": 0,
|
| 121 |
+
"isError": True
|
| 122 |
+
}
|
| 123 |
+
except Exception as e:
|
| 124 |
+
return {
|
| 125 |
+
"formatted": f"Error executing {operation}: {str(e)}",
|
| 126 |
+
"totalResults": 0,
|
| 127 |
+
"resultsShared": 0,
|
| 128 |
+
"isError": True
|
| 129 |
+
}
|
| 130 |
+
|
| 131 |
+
def _show_help(self) -> ToolResult:
|
| 132 |
+
"""Show usage instructions when tool is called with no arguments"""
|
| 133 |
+
cpu_flavors_list = ', '.join(CPU_FLAVORS)
|
| 134 |
+
gpu_flavors_list = ', '.join(GPU_FLAVORS)
|
| 135 |
+
specialized_flavors_list = ', '.join(SPECIALIZED_FLAVORS)
|
| 136 |
+
|
| 137 |
+
hardware_section = f"**CPU:** {cpu_flavors_list}\n"
|
| 138 |
+
if GPU_FLAVORS:
|
| 139 |
+
hardware_section += f"**GPU:** {gpu_flavors_list}\n"
|
| 140 |
+
if SPECIALIZED_FLAVORS:
|
| 141 |
+
hardware_section += f"**Specialized:** {specialized_flavors_list}"
|
| 142 |
+
|
| 143 |
+
usage_text = f"""# HuggingFace Jobs API
|
| 144 |
+
|
| 145 |
+
Manage compute jobs on Hugging Face infrastructure.
|
| 146 |
+
|
| 147 |
+
## Available Commands
|
| 148 |
+
|
| 149 |
+
### Job Management
|
| 150 |
+
- **run** - Run a job with a Docker image
|
| 151 |
+
- **uv** - Run a Python script with UV (inline dependencies)
|
| 152 |
+
- **ps** - List jobs
|
| 153 |
+
- **logs** - Fetch job logs
|
| 154 |
+
- **inspect** - Get detailed job information
|
| 155 |
+
- **cancel** - Cancel a running job
|
| 156 |
+
|
| 157 |
+
### Scheduled Jobs
|
| 158 |
+
- **scheduled run** - Create a scheduled job
|
| 159 |
+
- **scheduled uv** - Create a scheduled UV job
|
| 160 |
+
- **scheduled ps** - List scheduled jobs
|
| 161 |
+
- **scheduled inspect** - Get scheduled job details
|
| 162 |
+
- **scheduled delete** - Delete a scheduled job
|
| 163 |
+
- **scheduled suspend** - Pause a scheduled job
|
| 164 |
+
- **scheduled resume** - Resume a suspended job
|
| 165 |
+
|
| 166 |
+
## Examples
|
| 167 |
+
|
| 168 |
+
### Run a simple job
|
| 169 |
+
Call this tool with:
|
| 170 |
+
```json
|
| 171 |
+
{{
|
| 172 |
+
"operation": "run",
|
| 173 |
+
"args": {{
|
| 174 |
+
"image": "python:3.12",
|
| 175 |
+
"command": ["python", "-c", "print('Hello from HF Jobs!')"],
|
| 176 |
+
"flavor": "cpu-basic"
|
| 177 |
+
}}
|
| 178 |
+
}}
|
| 179 |
+
```
|
| 180 |
+
|
| 181 |
+
### Run a Python script with UV
|
| 182 |
+
Call this tool with:
|
| 183 |
+
```json
|
| 184 |
+
{{
|
| 185 |
+
"operation": "uv",
|
| 186 |
+
"args": {{
|
| 187 |
+
"script": "import random\\nprint(42 + random.randint(1, 5))"
|
| 188 |
+
}}
|
| 189 |
+
}}
|
| 190 |
+
```
|
| 191 |
+
|
| 192 |
+
## Hardware Flavors
|
| 193 |
+
|
| 194 |
+
{hardware_section}
|
| 195 |
+
|
| 196 |
+
## Command Format Guidelines
|
| 197 |
+
|
| 198 |
+
**Array format (default):**
|
| 199 |
+
- Recommended for every command—JSON keeps arguments intact (URLs with `&`, spaces, etc.)
|
| 200 |
+
- Use `["/bin/sh", "-lc", "..."]` when you need shell operators like `&&`, `|`, or redirections
|
| 201 |
+
- Works with any language: Python, bash, node, npm, uv, etc.
|
| 202 |
+
|
| 203 |
+
**String format (simple cases only):**
|
| 204 |
+
- Still accepted for backwards compatibility, parsed with POSIX shell semantics
|
| 205 |
+
- Rejects shell operators and can mis-handle characters such as `&`; switch to arrays when things turn complex
|
| 206 |
+
- `$HF_TOKEN` stays literal—forward it via `secrets: {{ "HF_TOKEN": "$HF_TOKEN" }}`
|
| 207 |
+
|
| 208 |
+
### Show command-specific help
|
| 209 |
+
Call this tool with:
|
| 210 |
+
```json
|
| 211 |
+
{{"operation": "<operation>", "args": {{"help": true}}}}
|
| 212 |
+
```
|
| 213 |
+
|
| 214 |
+
## Tips
|
| 215 |
+
|
| 216 |
+
- Jobs default to non-detached mode (tail logs for up to {DEFAULT_LOG_WAIT_SECONDS}s or until completion). Set `detach: true` to return immediately.
|
| 217 |
+
- Prefer array commands to avoid shell parsing surprises
|
| 218 |
+
- To access private Hub assets, include `secrets: {{ "HF_TOKEN": "$HF_TOKEN" }}` to inject your auth token.
|
| 219 |
+
"""
|
| 220 |
+
return {
|
| 221 |
+
"formatted": usage_text,
|
| 222 |
+
"totalResults": 1,
|
| 223 |
+
"resultsShared": 1
|
| 224 |
+
}
|
| 225 |
+
|
| 226 |
+
def _show_operation_help(self, operation: str) -> ToolResult:
|
| 227 |
+
"""Show help for a specific operation"""
|
| 228 |
+
help_text = f"Help for operation: {operation}\n\nCall with appropriate arguments. Use the main help for examples."
|
| 229 |
+
return {
|
| 230 |
+
"formatted": help_text,
|
| 231 |
+
"totalResults": 1,
|
| 232 |
+
"resultsShared": 1
|
| 233 |
+
}
|
| 234 |
+
|
| 235 |
+
async def _run_job(self, args: Dict[str, Any]) -> ToolResult:
|
| 236 |
+
"""Create and run a job"""
|
| 237 |
+
# Create job spec from args
|
| 238 |
+
job_spec = create_job_spec({
|
| 239 |
+
'image': args.get('image', 'python:3.12'),
|
| 240 |
+
'command': args.get('command'),
|
| 241 |
+
'flavor': args.get('flavor', 'cpu-basic'),
|
| 242 |
+
'env': args.get('env'),
|
| 243 |
+
'secrets': args.get('secrets'),
|
| 244 |
+
'timeout': args.get('timeout', '30m'),
|
| 245 |
+
'hfToken': self.hf_token,
|
| 246 |
+
})
|
| 247 |
+
|
| 248 |
+
# Submit job
|
| 249 |
+
job = await self.client.run_job(job_spec, args.get('namespace'))
|
| 250 |
+
|
| 251 |
+
job_url = f"https://huggingface.co/jobs/{job['owner']['name']}/{job['id']}"
|
| 252 |
+
|
| 253 |
+
# If detached, return immediately
|
| 254 |
+
if args.get('detach', False):
|
| 255 |
+
response = f"""Job started successfully!
|
| 256 |
+
|
| 257 |
+
**Job ID:** {job['id']}
|
| 258 |
+
**Status:** {job['status']['stage']}
|
| 259 |
+
**View at:** {job_url}
|
| 260 |
+
|
| 261 |
+
To check logs, call this tool with `{{"operation": "logs", "args": {{"job_id": "{job['id']}"}}}}`
|
| 262 |
+
To inspect, call this tool with `{{"operation": "inspect", "args": {{"job_id": "{job['id']}"}}}}`"""
|
| 263 |
+
return {
|
| 264 |
+
"formatted": response,
|
| 265 |
+
"totalResults": 1,
|
| 266 |
+
"resultsShared": 1
|
| 267 |
+
}
|
| 268 |
+
|
| 269 |
+
# Not detached - return job info and link to logs
|
| 270 |
+
response = f"""Job started: {job['id']}
|
| 271 |
+
|
| 272 |
+
**Status:** {job['status']['stage']}
|
| 273 |
+
**View logs at:** {job_url}
|
| 274 |
+
|
| 275 |
+
Note: Logs are being collected. Check the job page for real-time logs.
|
| 276 |
+
"""
|
| 277 |
+
return {
|
| 278 |
+
"formatted": response,
|
| 279 |
+
"totalResults": 1,
|
| 280 |
+
"resultsShared": 1
|
| 281 |
+
}
|
| 282 |
+
|
| 283 |
+
async def _run_uv_job(self, args: Dict[str, Any]) -> ToolResult:
|
| 284 |
+
"""Run job with UV package manager"""
|
| 285 |
+
# UV jobs use a standard UV image
|
| 286 |
+
image = UV_DEFAULT_IMAGE
|
| 287 |
+
|
| 288 |
+
# Build UV command
|
| 289 |
+
command = resolve_uv_command(args)
|
| 290 |
+
|
| 291 |
+
# Convert to run args
|
| 292 |
+
run_args = {
|
| 293 |
+
'image': image,
|
| 294 |
+
'command': command,
|
| 295 |
+
'flavor': args.get('flavor', 'cpu-basic'),
|
| 296 |
+
'env': args.get('env'),
|
| 297 |
+
'secrets': args.get('secrets'),
|
| 298 |
+
'timeout': args.get('timeout', '30m'),
|
| 299 |
+
'detach': args.get('detach', False),
|
| 300 |
+
'namespace': args.get('namespace'),
|
| 301 |
+
}
|
| 302 |
+
|
| 303 |
+
return await self._run_job(run_args)
|
| 304 |
+
|
| 305 |
+
async def _list_jobs(self, args: Dict[str, Any]) -> ToolResult:
|
| 306 |
+
"""List user's jobs"""
|
| 307 |
+
# Fetch all jobs from API
|
| 308 |
+
all_jobs = await self.client.list_jobs(args.get('namespace'))
|
| 309 |
+
|
| 310 |
+
# Filter jobs
|
| 311 |
+
jobs = all_jobs
|
| 312 |
+
|
| 313 |
+
# Default: show only running jobs unless --all is specified
|
| 314 |
+
if not args.get('all', False):
|
| 315 |
+
jobs = [job for job in jobs if job['status']['stage'] == 'RUNNING']
|
| 316 |
+
|
| 317 |
+
# Apply status filter if specified
|
| 318 |
+
if args.get('status'):
|
| 319 |
+
status_filter = args['status'].upper()
|
| 320 |
+
jobs = [job for job in jobs if status_filter in job['status']['stage'].upper()]
|
| 321 |
+
|
| 322 |
+
# Format as markdown table
|
| 323 |
+
table = format_jobs_table(jobs)
|
| 324 |
+
|
| 325 |
+
if len(jobs) == 0:
|
| 326 |
+
if args.get('all', False):
|
| 327 |
+
return {
|
| 328 |
+
"formatted": "No jobs found.",
|
| 329 |
+
"totalResults": 0,
|
| 330 |
+
"resultsShared": 0
|
| 331 |
+
}
|
| 332 |
+
return {
|
| 333 |
+
"formatted": 'No running jobs found. Use `{"args": {"all": true}}` to show all jobs.',
|
| 334 |
+
"totalResults": 0,
|
| 335 |
+
"resultsShared": 0
|
| 336 |
+
}
|
| 337 |
+
|
| 338 |
+
response = f"**Jobs ({len(jobs)} of {len(all_jobs)} total):**\n\n{table}"
|
| 339 |
+
return {
|
| 340 |
+
"formatted": response,
|
| 341 |
+
"totalResults": len(all_jobs),
|
| 342 |
+
"resultsShared": len(jobs)
|
| 343 |
+
}
|
| 344 |
+
|
| 345 |
+
async def _get_logs(self, args: Dict[str, Any]) -> ToolResult:
|
| 346 |
+
"""Get logs for a job"""
|
| 347 |
+
job_id = args.get('job_id')
|
| 348 |
+
if not job_id:
|
| 349 |
+
return {
|
| 350 |
+
"formatted": "job_id is required",
|
| 351 |
+
"totalResults": 0,
|
| 352 |
+
"resultsShared": 0,
|
| 353 |
+
"isError": True
|
| 354 |
+
}
|
| 355 |
+
|
| 356 |
+
# Get namespace for the logs URL
|
| 357 |
+
namespace = await self.client.get_namespace(args.get('namespace'))
|
| 358 |
+
job_url = f"https://huggingface.co/jobs/{namespace}/{job_id}"
|
| 359 |
+
|
| 360 |
+
# For now, direct users to the web interface for logs
|
| 361 |
+
# Full SSE streaming implementation would be more complex
|
| 362 |
+
response = f"""**Logs for job {job_id}**
|
| 363 |
+
|
| 364 |
+
View real-time logs at: {job_url}
|
| 365 |
+
|
| 366 |
+
Note: Full log streaming support is coming soon. Please use the web interface for now.
|
| 367 |
+
"""
|
| 368 |
+
return {
|
| 369 |
+
"formatted": response,
|
| 370 |
+
"totalResults": 1,
|
| 371 |
+
"resultsShared": 1
|
| 372 |
+
}
|
| 373 |
+
|
| 374 |
+
async def _inspect_job(self, args: Dict[str, Any]) -> ToolResult:
|
| 375 |
+
"""Get detailed information about one or more jobs"""
|
| 376 |
+
job_id = args.get('job_id')
|
| 377 |
+
if not job_id:
|
| 378 |
+
return {
|
| 379 |
+
"formatted": "job_id is required",
|
| 380 |
+
"totalResults": 0,
|
| 381 |
+
"resultsShared": 0,
|
| 382 |
+
"isError": True
|
| 383 |
+
}
|
| 384 |
+
|
| 385 |
+
job_ids = job_id if isinstance(job_id, list) else [job_id]
|
| 386 |
+
|
| 387 |
+
# Fetch all jobs
|
| 388 |
+
jobs = []
|
| 389 |
+
for jid in job_ids:
|
| 390 |
+
try:
|
| 391 |
+
job = await self.client.get_job(jid, args.get('namespace'))
|
| 392 |
+
jobs.append(job)
|
| 393 |
+
except Exception as e:
|
| 394 |
+
raise Exception(f"Failed to fetch job {jid}: {str(e)}")
|
| 395 |
+
|
| 396 |
+
formatted_details = format_job_details(jobs)
|
| 397 |
+
response = f"**Job Details** ({len(jobs)} job{'s' if len(jobs) > 1 else ''}):\n\n{formatted_details}"
|
| 398 |
+
|
| 399 |
+
return {
|
| 400 |
+
"formatted": response,
|
| 401 |
+
"totalResults": len(jobs),
|
| 402 |
+
"resultsShared": len(jobs)
|
| 403 |
+
}
|
| 404 |
+
|
| 405 |
+
async def _cancel_job(self, args: Dict[str, Any]) -> ToolResult:
|
| 406 |
+
"""Cancel a running job"""
|
| 407 |
+
job_id = args.get('job_id')
|
| 408 |
+
if not job_id:
|
| 409 |
+
return {
|
| 410 |
+
"formatted": "job_id is required",
|
| 411 |
+
"totalResults": 0,
|
| 412 |
+
"resultsShared": 0,
|
| 413 |
+
"isError": True
|
| 414 |
+
}
|
| 415 |
+
|
| 416 |
+
await self.client.cancel_job(job_id, args.get('namespace'))
|
| 417 |
+
|
| 418 |
+
response = f"""✓ Job {job_id} has been cancelled.
|
| 419 |
+
|
| 420 |
+
To verify, call this tool with `{{"operation": "inspect", "args": {{"job_id": "{job_id}"}}}}`"""
|
| 421 |
+
|
| 422 |
+
return {
|
| 423 |
+
"formatted": response,
|
| 424 |
+
"totalResults": 1,
|
| 425 |
+
"resultsShared": 1
|
| 426 |
+
}
|
| 427 |
+
|
| 428 |
+
async def _scheduled_run(self, args: Dict[str, Any]) -> ToolResult:
|
| 429 |
+
"""Create a scheduled job"""
|
| 430 |
+
# Create job spec
|
| 431 |
+
job_spec = create_job_spec({
|
| 432 |
+
'image': args.get('image', 'python:3.12'),
|
| 433 |
+
'command': args.get('command'),
|
| 434 |
+
'flavor': args.get('flavor', 'cpu-basic'),
|
| 435 |
+
'env': args.get('env'),
|
| 436 |
+
'secrets': args.get('secrets'),
|
| 437 |
+
'timeout': args.get('timeout', '30m'),
|
| 438 |
+
'hfToken': self.hf_token,
|
| 439 |
+
})
|
| 440 |
+
|
| 441 |
+
# Create scheduled job spec
|
| 442 |
+
scheduled_spec = {
|
| 443 |
+
'schedule': args.get('schedule'),
|
| 444 |
+
'suspend': args.get('suspend', False),
|
| 445 |
+
'jobSpec': job_spec,
|
| 446 |
+
}
|
| 447 |
+
|
| 448 |
+
# Submit scheduled job
|
| 449 |
+
scheduled_job = await self.client.create_scheduled_job(scheduled_spec, args.get('namespace'))
|
| 450 |
+
|
| 451 |
+
response = f"""✓ Scheduled job created successfully!
|
| 452 |
+
|
| 453 |
+
**Scheduled Job ID:** {scheduled_job['id']}
|
| 454 |
+
**Schedule:** {scheduled_job['schedule']}
|
| 455 |
+
**Suspended:** {'Yes' if scheduled_job.get('suspend') else 'No'}
|
| 456 |
+
**Next Run:** {scheduled_job.get('nextRun', 'N/A')}
|
| 457 |
+
|
| 458 |
+
To inspect, call this tool with `{{"operation": "scheduled inspect", "args": {{"scheduled_job_id": "{scheduled_job['id']}"}}}}`
|
| 459 |
+
To list all, call this tool with `{{"operation": "scheduled ps"}}`"""
|
| 460 |
+
|
| 461 |
+
return {
|
| 462 |
+
"formatted": response,
|
| 463 |
+
"totalResults": 1,
|
| 464 |
+
"resultsShared": 1
|
| 465 |
+
}
|
| 466 |
+
|
| 467 |
+
async def _scheduled_uv(self, args: Dict[str, Any]) -> ToolResult:
|
| 468 |
+
"""Create a scheduled UV job"""
|
| 469 |
+
# For UV, use standard UV image
|
| 470 |
+
image = UV_DEFAULT_IMAGE
|
| 471 |
+
|
| 472 |
+
# Build UV command
|
| 473 |
+
command = resolve_uv_command(args)
|
| 474 |
+
|
| 475 |
+
# Convert to scheduled run args
|
| 476 |
+
scheduled_run_args = {
|
| 477 |
+
'schedule': args.get('schedule'),
|
| 478 |
+
'suspend': args.get('suspend', False),
|
| 479 |
+
'image': image,
|
| 480 |
+
'command': command,
|
| 481 |
+
'flavor': args.get('flavor', 'cpu-basic'),
|
| 482 |
+
'env': args.get('env'),
|
| 483 |
+
'secrets': args.get('secrets'),
|
| 484 |
+
'timeout': args.get('timeout', '30m'),
|
| 485 |
+
'namespace': args.get('namespace'),
|
| 486 |
+
}
|
| 487 |
+
|
| 488 |
+
return await self._scheduled_run(scheduled_run_args)
|
| 489 |
+
|
| 490 |
+
async def _list_scheduled_jobs(self, args: Dict[str, Any]) -> ToolResult:
|
| 491 |
+
"""List scheduled jobs"""
|
| 492 |
+
# Fetch all scheduled jobs
|
| 493 |
+
all_jobs = await self.client.list_scheduled_jobs(args.get('namespace'))
|
| 494 |
+
|
| 495 |
+
# Filter jobs
|
| 496 |
+
jobs = all_jobs
|
| 497 |
+
|
| 498 |
+
# Default: hide suspended jobs unless --all is specified
|
| 499 |
+
if not args.get('all', False):
|
| 500 |
+
jobs = [job for job in jobs if not job.get('suspend', False)]
|
| 501 |
+
|
| 502 |
+
# Format as markdown table
|
| 503 |
+
table = format_scheduled_jobs_table(jobs)
|
| 504 |
+
|
| 505 |
+
if len(jobs) == 0:
|
| 506 |
+
if args.get('all', False):
|
| 507 |
+
return {
|
| 508 |
+
"formatted": "No scheduled jobs found.",
|
| 509 |
+
"totalResults": 0,
|
| 510 |
+
"resultsShared": 0
|
| 511 |
+
}
|
| 512 |
+
return {
|
| 513 |
+
"formatted": 'No active scheduled jobs found. Use `{"args": {"all": true}}` to show suspended jobs.',
|
| 514 |
+
"totalResults": 0,
|
| 515 |
+
"resultsShared": 0
|
| 516 |
+
}
|
| 517 |
+
|
| 518 |
+
response = f"**Scheduled Jobs ({len(jobs)} of {len(all_jobs)} total):**\n\n{table}"
|
| 519 |
+
return {
|
| 520 |
+
"formatted": response,
|
| 521 |
+
"totalResults": len(all_jobs),
|
| 522 |
+
"resultsShared": len(jobs)
|
| 523 |
+
}
|
| 524 |
+
|
| 525 |
+
async def _inspect_scheduled_job(self, args: Dict[str, Any]) -> ToolResult:
|
| 526 |
+
"""Get details of a scheduled job"""
|
| 527 |
+
scheduled_job_id = args.get('scheduled_job_id')
|
| 528 |
+
if not scheduled_job_id:
|
| 529 |
+
return {
|
| 530 |
+
"formatted": "scheduled_job_id is required",
|
| 531 |
+
"totalResults": 0,
|
| 532 |
+
"resultsShared": 0,
|
| 533 |
+
"isError": True
|
| 534 |
+
}
|
| 535 |
+
|
| 536 |
+
job = await self.client.get_scheduled_job(scheduled_job_id, args.get('namespace'))
|
| 537 |
+
formatted_details = format_scheduled_job_details(job)
|
| 538 |
+
|
| 539 |
+
return {
|
| 540 |
+
"formatted": f"**Scheduled Job Details:**\n\n{formatted_details}",
|
| 541 |
+
"totalResults": 1,
|
| 542 |
+
"resultsShared": 1
|
| 543 |
+
}
|
| 544 |
+
|
| 545 |
+
async def _delete_scheduled_job(self, args: Dict[str, Any]) -> ToolResult:
|
| 546 |
+
"""Delete a scheduled job"""
|
| 547 |
+
scheduled_job_id = args.get('scheduled_job_id')
|
| 548 |
+
if not scheduled_job_id:
|
| 549 |
+
return {
|
| 550 |
+
"formatted": "scheduled_job_id is required",
|
| 551 |
+
"totalResults": 0,
|
| 552 |
+
"resultsShared": 0,
|
| 553 |
+
"isError": True
|
| 554 |
+
}
|
| 555 |
+
|
| 556 |
+
await self.client.delete_scheduled_job(scheduled_job_id, args.get('namespace'))
|
| 557 |
+
|
| 558 |
+
return {
|
| 559 |
+
"formatted": f"✓ Scheduled job {scheduled_job_id} has been deleted.",
|
| 560 |
+
"totalResults": 1,
|
| 561 |
+
"resultsShared": 1
|
| 562 |
+
}
|
| 563 |
+
|
| 564 |
+
async def _suspend_scheduled_job(self, args: Dict[str, Any]) -> ToolResult:
|
| 565 |
+
"""Suspend a scheduled job"""
|
| 566 |
+
scheduled_job_id = args.get('scheduled_job_id')
|
| 567 |
+
if not scheduled_job_id:
|
| 568 |
+
return {
|
| 569 |
+
"formatted": "scheduled_job_id is required",
|
| 570 |
+
"totalResults": 0,
|
| 571 |
+
"resultsShared": 0,
|
| 572 |
+
"isError": True
|
| 573 |
+
}
|
| 574 |
+
|
| 575 |
+
await self.client.suspend_scheduled_job(scheduled_job_id, args.get('namespace'))
|
| 576 |
+
|
| 577 |
+
response = f"""✓ Scheduled job {scheduled_job_id} has been suspended.
|
| 578 |
+
|
| 579 |
+
To resume, call this tool with `{{"operation": "scheduled resume", "args": {{"scheduled_job_id": "{scheduled_job_id}"}}}}`"""
|
| 580 |
+
|
| 581 |
+
return {
|
| 582 |
+
"formatted": response,
|
| 583 |
+
"totalResults": 1,
|
| 584 |
+
"resultsShared": 1
|
| 585 |
+
}
|
| 586 |
+
|
| 587 |
+
async def _resume_scheduled_job(self, args: Dict[str, Any]) -> ToolResult:
|
| 588 |
+
"""Resume a suspended scheduled job"""
|
| 589 |
+
scheduled_job_id = args.get('scheduled_job_id')
|
| 590 |
+
if not scheduled_job_id:
|
| 591 |
+
return {
|
| 592 |
+
"formatted": "scheduled_job_id is required",
|
| 593 |
+
"totalResults": 0,
|
| 594 |
+
"resultsShared": 0,
|
| 595 |
+
"isError": True
|
| 596 |
+
}
|
| 597 |
+
|
| 598 |
+
await self.client.resume_scheduled_job(scheduled_job_id, args.get('namespace'))
|
| 599 |
+
|
| 600 |
+
response = f"""✓ Scheduled job {scheduled_job_id} has been resumed.
|
| 601 |
+
|
| 602 |
+
To inspect, call this tool with `{{"operation": "scheduled inspect", "args": {{"scheduled_job_id": "{scheduled_job_id}"}}}}`"""
|
| 603 |
+
|
| 604 |
+
return {
|
| 605 |
+
"formatted": response,
|
| 606 |
+
"totalResults": 1,
|
| 607 |
+
"resultsShared": 1
|
| 608 |
+
}
|
| 609 |
+
|
| 610 |
+
|
| 611 |
+
# Tool specification for agent registration
|
| 612 |
+
# JSON-schema style tool specification consumed by the agent when the tool
# is registered (imported by agent/core/tools.py alongside hf_jobs_handler).
HF_JOBS_TOOL_SPEC = {
    "name": "hf_jobs",
    "description": (
        "Manage Hugging Face CPU/GPU compute jobs. Run commands in Docker containers, "
        "execute Python scripts with UV. List, schedule and monitor jobs/logs. "
        "Call this tool with no operation for full usage instructions and examples."
    ),
    "parameters": {
        "type": "object",
        "properties": {
            # Note: `operation` is intentionally NOT in `required` — calling
            # with no operation returns the full usage help.
            "operation": {
                "type": "string",
                "enum": [
                    "run", "uv", "ps", "logs", "inspect", "cancel",
                    "scheduled run", "scheduled uv", "scheduled ps",
                    "scheduled inspect", "scheduled delete", "scheduled suspend", "scheduled resume"
                ],
                "description": (
                    "Operation to execute. Valid values: run, uv, ps, logs, inspect, cancel, "
                    "scheduled run, scheduled uv, scheduled ps, scheduled inspect, scheduled delete, "
                    "scheduled suspend, scheduled resume"
                )
            },
            # Free-form per-operation arguments (job_id, command, flavor, ...).
            "args": {
                "type": "object",
                "description": "Operation-specific arguments as a JSON object",
                "additionalProperties": True
            }
        }
    }
}
|
| 643 |
+
|
| 644 |
+
|
| 645 |
+
async def hf_jobs_handler(arguments: Dict[str, Any]) -> tuple[str, bool]:
    """Adapter between the agent's tool router and :class:`HfJobsTool`.

    Returns ``(formatted_text, success)``. Any unexpected error is reported
    as a failure string rather than propagating to the router.
    """
    try:
        outcome = await HfJobsTool().execute(arguments)
        return outcome["formatted"], not outcome.get("isError", False)
    except Exception as e:  # top-level boundary: surface the error as text
        return f"Error executing HF Jobs tool: {str(e)}", False
|
agent/tools/hf/jobs/uv_utils.py
ADDED
|
@@ -0,0 +1,66 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
UV command utilities
|
| 3 |
+
|
| 4 |
+
Ported from: hf-mcp-server/packages/mcp/src/jobs/commands/uv-utils.ts
|
| 5 |
+
"""
|
| 6 |
+
import base64
|
| 7 |
+
from typing import List, Dict, Optional, Any
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
UV_DEFAULT_IMAGE = 'ghcr.io/astral-sh/uv:python3.12-bookworm'
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
def build_uv_command(script: str, args: Dict[str, Any]) -> List[str]:
    """Assemble a ``uv run`` argv for *script*.

    Recognized option keys in *args*: ``with_deps`` (extra dependencies,
    each emitted as ``--with``), ``python`` (interpreter version, emitted
    as ``-p``), and ``script_args`` (forwarded after the script).
    """
    argv: List[str] = ['uv', 'run']

    # Dependencies come first, one --with flag per package.
    for dependency in (args.get('with_deps') or []):
        argv += ['--with', dependency]

    interpreter = args.get('python')
    if interpreter:
        argv += ['-p', interpreter]

    argv.append(script)

    # Anything in script_args is passed through to the script itself.
    argv.extend(args.get('script_args') or [])

    return argv
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def wrap_inline_script(script: str, args: Dict[str, Any]) -> str:
    """Wrap an inline script into a shell pipeline for UV.

    The script body is base64-encoded so newlines and quotes survive shell
    transport, then decoded on the worker and piped into ``uv run -``.

    The command parts are shell-quoted with ``shlex.join``: the previous
    plain ``' '.join`` left dependency specifiers such as ``numpy>=2``
    unquoted, where ``>`` would be interpreted as a shell redirection.
    """
    import shlex  # local import: only this shell-wrapping path needs it

    encoded = base64.b64encode(script.encode('utf-8')).decode('utf-8')
    base_command = build_uv_command('-', args)
    # Shell-quote each part (safe tokens are left as-is, so simple
    # commands render identically to before).
    quoted_command = shlex.join(base_command)
    return f'echo "{encoded}" | base64 -d | {quoted_command}'
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
def resolve_uv_command(args: Dict[str, Any]) -> List[str]:
    """Turn the ``uv`` operation's args into an argv for the job.

    URLs and file paths are handed to ``uv run`` directly; multi-line
    inline scripts are base64-wrapped and piped through a shell.
    """
    source = args.get('script', '')
    options = {
        'with_deps': args.get('with_deps'),
        'python': args.get('python'),
        'script_args': args.get('script_args'),
    }

    if source.startswith(('http://', 'https://')):
        # Remote script: uv fetches and runs the URL directly.
        return build_uv_command(source, options)

    if '\n' in source:
        # Multi-line inline script: needs the base64/shell wrapper.
        return ['/bin/sh', '-lc', wrap_inline_script(source, options)]

    # Single-line script text or a path inside the image.
    return build_uv_command(source, options)
|
agent/tools/hf/types.py
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Types for Hugging Face tools
|
| 3 |
+
|
| 4 |
+
Ported from: hf-mcp-server/packages/mcp/src/types/
|
| 5 |
+
"""
|
| 6 |
+
from typing import TypedDict, Optional
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class ToolResult(TypedDict, total=False):
    """Result returned by HF tool operations"""
    # Human-readable (markdown) output surfaced to the model/user.
    formatted: str
    # Total number of matching items before any filtering/truncation.
    totalResults: int
    # Number of items actually included in `formatted`.
    resultsShared: int
    # True when the operation failed; absent/False on success.
    isError: bool
|
agent/tools/hf/utilities.py
ADDED
|
@@ -0,0 +1,133 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Utility functions for Hugging Face tools
|
| 3 |
+
|
| 4 |
+
Ported from: hf-mcp-server/packages/mcp/src/jobs/formatters.ts
|
| 5 |
+
"""
|
| 6 |
+
from typing import Any, Dict, List, Optional
|
| 7 |
+
from datetime import datetime
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
def truncate(text: str, max_length: int) -> str:
    """Shorten *text* to at most *max_length* chars, ellipsizing when cut."""
    return text if len(text) <= max_length else f"{text[:max_length - 3]}..."
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def format_date(date_str: Optional[str]) -> str:
    """Render an ISO-8601 timestamp as ``YYYY-MM-DD HH:MM:SS``.

    Returns ``"N/A"`` for missing values and the raw input when parsing
    fails.
    """
    if not date_str:
        return "N/A"
    try:
        # fromisoformat() does not accept a trailing 'Z' on older
        # Pythons, so normalize it to an explicit UTC offset first.
        parsed = datetime.fromisoformat(date_str.replace('Z', '+00:00'))
        return parsed.strftime('%Y-%m-%d %H:%M:%S')
    except Exception:
        return date_str
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
def format_command(command: Optional[List[str]]) -> str:
    """Join a command argv into one display string (``"N/A"`` when empty)."""
    return " ".join(command) if command else "N/A"
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
def get_image_or_space(job: Dict[str, Any]) -> str:
    """Return the job's Space id if set, else its Docker image, else "N/A"."""
    for key in ('spaceId', 'dockerImage'):
        value = job.get(key)
        if value:
            return value
    return "N/A"
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
def format_jobs_table(jobs: List[Dict[str, Any]]) -> str:
    """Render a list of job dicts as a fixed-width markdown table."""
    if not jobs:
        return "No jobs found."

    # The ID column grows to fit the longest id, never narrower than
    # its header; the other columns are fixed-width with truncation.
    widths = {
        'id': max(max(len(j['id']) for j in jobs), len('JOB ID')),
        'image': 20,
        'command': 30,
        'created': 19,
        'status': 12,
    }

    columns = [
        ('JOB ID', 'id'),
        ('IMAGE/SPACE', 'image'),
        ('COMMAND', 'command'),
        ('CREATED', 'created'),
        ('STATUS', 'status'),
    ]

    header = "| " + " | ".join(title.ljust(widths[key]) for title, key in columns) + " |"
    separator = "|" + "|".join('-' * (widths[key] + 2) for _, key in columns) + "|"

    lines = [header, separator]
    for job in jobs:
        cells = [
            job['id'].ljust(widths['id']),
            truncate(get_image_or_space(job), widths['image']).ljust(widths['image']),
            truncate(format_command(job.get('command')), widths['command']).ljust(widths['command']),
            truncate(format_date(job.get('createdAt')), widths['created']).ljust(widths['created']),
            truncate(job['status']['stage'], widths['status']).ljust(widths['status']),
        ]
        lines.append("| " + " | ".join(cells) + " |")

    return '\n'.join(lines)
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def format_scheduled_jobs_table(jobs: List[Dict[str, Any]]) -> str:
    """Render scheduled job dicts as a fixed-width markdown table."""
    if not jobs:
        return "No scheduled jobs found."

    # Dynamic ID column; all other columns are fixed-width with truncation.
    widths = {
        'id': max(max(len(j['id']) for j in jobs), len('ID')),
        'schedule': 12,
        'image': 18,
        'command': 25,
        'lastRun': 19,
        'nextRun': 19,
        'suspend': 9,
    }

    columns = [
        ('ID', 'id'),
        ('SCHEDULE', 'schedule'),
        ('IMAGE/SPACE', 'image'),
        ('COMMAND', 'command'),
        ('LAST RUN', 'lastRun'),
        ('NEXT RUN', 'nextRun'),
        ('SUSPENDED', 'suspend'),
    ]

    header = "| " + " | ".join(title.ljust(widths[key]) for title, key in columns) + " |"
    separator = "|" + "|".join('-' * (widths[key] + 2) for _, key in columns) + "|"

    lines = [header, separator]
    for job in jobs:
        spec = job['jobSpec']  # image/command live on the embedded job spec
        cells = [
            job['id'].ljust(widths['id']),
            truncate(job['schedule'], widths['schedule']).ljust(widths['schedule']),
            truncate(get_image_or_space(spec), widths['image']).ljust(widths['image']),
            truncate(format_command(spec.get('command')), widths['command']).ljust(widths['command']),
            truncate(format_date(job.get('lastRun')), widths['lastRun']).ljust(widths['lastRun']),
            truncate(format_date(job.get('nextRun')), widths['nextRun']).ljust(widths['nextRun']),
            ('Yes' if job.get('suspend') else 'No').ljust(widths['suspend']),
        ]
        lines.append("| " + " | ".join(cells) + " |")

    return '\n'.join(lines)
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
def format_job_details(jobs: Any) -> str:
    """Render one job dict (or a list of them) as a fenced JSON block."""
    import json
    payload = jobs if isinstance(jobs, list) else [jobs]
    return "```json\n" + json.dumps(payload, indent=2) + "\n```"
|
| 126 |
+
|
| 127 |
+
|
| 128 |
+
def format_scheduled_job_details(jobs: Any) -> str:
    """Render scheduled job details as a fenced JSON block.

    Formatting is identical to :func:`format_job_details`, so delegate to
    it instead of duplicating the implementation; the separate name is
    kept so call sites stay self-describing.
    """
    return format_job_details(jobs)
|
tests/tools/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
"""Tests for agent tools"""
|
tests/tools/hf/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
"""Tests for HF tools"""
|
tests/tools/hf/jobs/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
"""Tests for HF Jobs tool"""
|
tests/tools/hf/jobs/test_jobs_tool.py
ADDED
|
@@ -0,0 +1,252 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Tests for HF Jobs Tool
|
| 3 |
+
|
| 4 |
+
Tests the jobs tool implementation
|
| 5 |
+
"""
|
| 6 |
+
import pytest
|
| 7 |
+
from unittest.mock import AsyncMock, MagicMock, patch
|
| 8 |
+
from agent.tools.hf.jobs.jobs_tool import HfJobsTool, hf_jobs_handler
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
@pytest.mark.asyncio
async def test_show_help():
    """Calling with no operation should return the full usage help."""
    outcome = await HfJobsTool().execute({})

    text = outcome["formatted"]
    assert "HuggingFace Jobs API" in text
    assert "Available Commands" in text
    assert outcome["totalResults"] == 1
    assert not outcome.get("isError", False)
|
| 21 |
+
|
| 22 |
+
|
| 23 |
+
@pytest.mark.asyncio
async def test_show_operation_help():
    """`help: true` in args should return operation-specific help."""
    outcome = await HfJobsTool().execute({"operation": "run", "args": {"help": True}})

    assert "Help for operation" in outcome["formatted"]
    assert outcome["totalResults"] == 1
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
@pytest.mark.asyncio
async def test_invalid_operation():
    """An unrecognized operation should produce an error result."""
    tool = HfJobsTool()
    result = await tool.execute({"operation": "invalid_op"})

    # `is True` rather than `== True` (PEP 8 / flake8 E712): the flag must
    # be the actual boolean, not merely a truthy value.
    assert result.get("isError") is True
    assert "Unknown operation" in result["formatted"]
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
@pytest.mark.asyncio
async def test_run_job_missing_command():
    """`run` without a command should fail with a clear message."""
    tool = HfJobsTool()
    result = await tool.execute({
        "operation": "run",
        "args": {"image": "python:3.12"}
    })

    # `is True` rather than `== True` (PEP 8 / flake8 E712).
    assert result.get("isError") is True
    assert "command parameter is required" in result["formatted"]
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
@pytest.mark.asyncio
async def test_list_jobs_mock():
    """Test list jobs with mock API"""
    tool = HfJobsTool()

    # Mock the API client
    with patch.object(tool.client, 'list_jobs', new_callable=AsyncMock) as mock_list:
        # One RUNNING and one COMPLETED job so both filter paths are covered.
        mock_list.return_value = [
            {
                'id': 'test-job-1',
                'status': {'stage': 'RUNNING'},
                'command': ['echo', 'test'],
                'createdAt': '2024-01-01T00:00:00Z',
                'owner': {'name': 'test-user'}
            },
            {
                'id': 'test-job-2',
                'status': {'stage': 'COMPLETED'},
                'command': ['python', 'script.py'],
                'createdAt': '2024-01-01T01:00:00Z',
                'owner': {'name': 'test-user'}
            }
        ]

        # Test listing only running jobs (default)
        result = await tool.execute({"operation": "ps"})

        assert not result.get("isError", False)
        assert "test-job-1" in result["formatted"]
        assert "test-job-2" not in result["formatted"]  # COMPLETED jobs filtered out
        # totalResults counts everything; resultsShared only what is shown.
        assert result["totalResults"] == 2
        assert result["resultsShared"] == 1

        # Test listing all jobs
        result = await tool.execute({"operation": "ps", "args": {"all": True}})

        assert not result.get("isError", False)
        assert "test-job-1" in result["formatted"]
        assert "test-job-2" in result["formatted"]
        assert result["totalResults"] == 2
        assert result["resultsShared"] == 2
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
@pytest.mark.asyncio
async def test_inspect_job_mock():
    """Inspect a single job via a mocked `get_job` call."""
    tool = HfJobsTool()

    job_payload = {
        "id": "test-job-1",
        "status": {"stage": "RUNNING"},
        "command": ["echo", "test"],
        "createdAt": "2024-01-01T00:00:00Z",
        "owner": {"name": "test-user"},
        "flavor": "cpu-basic",
    }

    with patch.object(tool.client, "get_job", new_callable=AsyncMock) as mock_get:
        mock_get.return_value = job_payload

        result = await tool.execute(
            {"operation": "inspect", "args": {"job_id": "test-job-1"}}
        )

        assert not result.get("isError", False)
        assert "test-job-1" in result["formatted"]
        assert "Job Details" in result["formatted"]
        mock_get.assert_called_once()
@pytest.mark.asyncio
async def test_cancel_job_mock():
    """Cancel a job via a mocked `cancel_job` call."""
    tool = HfJobsTool()

    with patch.object(tool.client, "cancel_job", new_callable=AsyncMock) as mock_cancel:
        mock_cancel.return_value = None

        result = await tool.execute(
            {"operation": "cancel", "args": {"job_id": "test-job-1"}}
        )

        assert not result.get("isError", False)
        assert "cancelled" in result["formatted"]
        assert "test-job-1" in result["formatted"]
        mock_cancel.assert_called_once()
@pytest.mark.asyncio
async def test_handler():
    """The module-level handler wraps HfJobsTool.execute and reports success."""
    with patch("agent.tools.hf.jobs.jobs_tool.HfJobsTool") as MockTool:
        mock_tool_instance = MockTool.return_value
        mock_tool_instance.execute = AsyncMock(
            return_value={
                "formatted": "Test output",
                "totalResults": 1,
                "resultsShared": 1,
                "isError": False,
            }
        )

        output, success = await hf_jobs_handler({"operation": "ps"})

        # Identity check, not `== True` (PEP 8 / flake8 E712).
        assert success is True
        assert "Test output" in output
@pytest.mark.asyncio
async def test_handler_error():
    """The handler converts an exception during tool construction into a failure result."""
    with patch("agent.tools.hf.jobs.jobs_tool.HfJobsTool") as MockTool:
        MockTool.side_effect = Exception("Test error")

        output, success = await hf_jobs_handler({})

        # Identity check, not `== False` (PEP 8 / flake8 E712).
        assert success is False
        assert "Error" in output
@pytest.mark.asyncio
async def test_scheduled_jobs_mock():
    """The 'scheduled ps' operation lists scheduled jobs from a mocked API."""
    tool = HfJobsTool()

    scheduled = [
        {
            "id": "sched-job-1",
            "schedule": "@daily",
            "suspend": False,
            "jobSpec": {
                "command": ["python", "backup.py"],
                "dockerImage": "python:3.12",
            },
            "nextRun": "2024-01-02T00:00:00Z",
        }
    ]

    with patch.object(
        tool.client, "list_scheduled_jobs", new_callable=AsyncMock
    ) as mock_list:
        mock_list.return_value = scheduled

        result = await tool.execute({"operation": "scheduled ps"})

        assert not result.get("isError", False)
        assert "sched-job-1" in result["formatted"]
        assert "Scheduled Jobs" in result["formatted"]
def test_job_utils():
    """Exercise the pure helpers in job_utils: timeout, image source, and command parsing."""
    from agent.tools.hf.jobs.job_utils import (
        parse_command,
        parse_image_source,
        parse_timeout,
    )

    # Timeout strings with unit suffixes resolve to seconds.
    for spec, seconds in (("5m", 300), ("2h", 7200), ("30s", 30), ("1d", 86400)):
        assert parse_timeout(spec) == seconds

    # A plain image reference is treated as a Docker image, not a Space.
    parsed = parse_image_source("python:3.12")
    assert parsed["dockerImage"] == "python:3.12"
    assert parsed["spaceId"] is None

    # A Space URL yields a spaceId and no Docker image.
    parsed = parse_image_source("https://huggingface.co/spaces/user/space")
    assert parsed["dockerImage"] is None
    assert parsed["spaceId"] == "user/space"

    # Commands may be supplied as a list or as a shell-style string.
    assert parse_command(["python", "script.py"])["command"] == ["python", "script.py"]
    assert parse_command("python script.py")["command"] == ["python", "script.py"]
def test_uv_utils():
    """Exercise the UV command-construction helpers."""
    from agent.tools.hf.jobs.uv_utils import build_uv_command, resolve_uv_command

    # A bare script with no options becomes a plain `uv run` invocation.
    assert build_uv_command("script.py", {}) == ["uv", "run", "script.py"]

    # Dependencies and a Python version are threaded through as flags.
    cmd = build_uv_command(
        "script.py", {"with_deps": ["requests", "numpy"], "python": "3.12"}
    )
    for token in ("uv", "run", "--with", "requests", "-p", "3.12"):
        assert token in cmd

    # A URL script is passed through untouched.
    cmd = resolve_uv_command({"script": "https://example.com/script.py"})
    assert "https://example.com/script.py" in cmd

    # Inline code is handed to `uv run` verbatim.
    cmd = resolve_uv_command({"script": "print('hello')"})
    assert cmd == ["uv", "run", "print('hello')"]