Spaces:
Sleeping
Sleeping
Commit: implement list jobs
Browse files — start_app.py (+33 lines, −5 lines)
start_app.py
CHANGED
|
@@ -6,6 +6,7 @@ import time
|
|
| 6 |
import yaml
|
| 7 |
|
| 8 |
import gradio as gr
|
|
|
|
| 9 |
import requests
|
| 10 |
from huggingface_hub import HfApi, get_token
|
| 11 |
|
|
@@ -50,7 +51,7 @@ def dry_run(src, config, split, dst, query):
|
|
| 50 |
args = ["--src", src, "--config", config, "--split", split, "--dst", dst, "--query", query, DRY_RUN]
|
| 51 |
cmd = CMD + args
|
| 52 |
logs = "Job:\n\n```bash\n" + " ".join('"' + arg.replace('"', '\"""') + '"' if " " in arg else arg for arg in cmd) + "\n```\nOutput:\n\n"
|
| 53 |
-
yield {output_markdown: logs, progress_labels: gr.Label(visible=False)}
|
| 54 |
process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
|
| 55 |
for line in iter(process.stdout.readline, b""):
|
| 56 |
logs += line.decode()
|
|
@@ -114,6 +115,7 @@ def run(src, config, split, dst, query, oauth_token: gr.OAuthToken | None, profi
|
|
| 114 |
pbars = {"Finished with an error ❌": 1.0}
|
| 115 |
yield {output_markdown: logs, progress_labels: gr.Label(pbars, visible=bool(pbars))}
|
| 116 |
|
|
|
|
| 117 |
READ_FUNCTIONS = ("pl.read_parquet", "pl.read_csv", "pl.read_json")
|
| 118 |
NUM_TRENDING_DATASETS = 10
|
| 119 |
|
|
@@ -140,11 +142,11 @@ with gr.Blocks() as demo:
|
|
| 140 |
if DRY_RUN:
|
| 141 |
dry_run_button = gr.Button("Dry-Run")
|
| 142 |
progress_labels= gr.Label(visible=False, label="Progress")
|
| 143 |
-
with gr.Accordion("Details", open=False):
|
| 144 |
output_markdown = gr.Markdown(label="Output logs")
|
| 145 |
-
run_button.click(run, inputs=[dataset_dropdown, subset_dropdown, split_dropdown, dst_dropdown, query_textarea], outputs=[progress_labels, output_markdown])
|
| 146 |
if DRY_RUN:
|
| 147 |
-
dry_run_button.click(dry_run, inputs=[dataset_dropdown, subset_dropdown, split_dropdown, dst_dropdown, query_textarea], outputs=[progress_labels, output_markdown])
|
| 148 |
|
| 149 |
def show_subset_dropdown(dataset: str):
|
| 150 |
if dataset and "/" not in dataset.strip().strip("/"):
|
|
@@ -193,8 +195,34 @@ if HELP:
|
|
| 193 |
with demo.route("Help", "/help"):
|
| 194 |
gr.Markdown(f"# Help\n\n```\n{HELP}\n```")
|
| 195 |
|
| 196 |
-
with demo.route("Jobs", "/jobs"):
|
| 197 |
gr.Markdown("# Jobs")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 198 |
|
| 199 |
if __name__ == "__main__":
|
| 200 |
demo.launch(server_name="0.0.0.0")
|
|
|
|
| 6 |
import yaml
|
| 7 |
|
| 8 |
import gradio as gr
|
| 9 |
+
import pandas as pd
|
| 10 |
import requests
|
| 11 |
from huggingface_hub import HfApi, get_token
|
| 12 |
|
|
|
|
| 51 |
args = ["--src", src, "--config", config, "--split", split, "--dst", dst, "--query", query, DRY_RUN]
|
| 52 |
cmd = CMD + args
|
| 53 |
logs = "Job:\n\n```bash\n" + " ".join('"' + arg.replace('"', '\"""') + '"' if " " in arg else arg for arg in cmd) + "\n```\nOutput:\n\n"
|
| 54 |
+
yield {output_markdown: logs, progress_labels: gr.Label(visible=False), details_accordion: gr.Accordion(open=True)}
|
| 55 |
process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
|
| 56 |
for line in iter(process.stdout.readline, b""):
|
| 57 |
logs += line.decode()
|
|
|
|
| 115 |
pbars = {"Finished with an error ❌": 1.0}
|
| 116 |
yield {output_markdown: logs, progress_labels: gr.Label(pbars, visible=bool(pbars))}
|
| 117 |
|
| 118 |
+
|
| 119 |
READ_FUNCTIONS = ("pl.read_parquet", "pl.read_csv", "pl.read_json")
|
| 120 |
NUM_TRENDING_DATASETS = 10
|
| 121 |
|
|
|
|
| 142 |
if DRY_RUN:
|
| 143 |
dry_run_button = gr.Button("Dry-Run")
|
| 144 |
progress_labels= gr.Label(visible=False, label="Progress")
|
| 145 |
+
with gr.Accordion("Details", open=False) as details_accordion:
|
| 146 |
output_markdown = gr.Markdown(label="Output logs")
|
| 147 |
+
run_button.click(run, inputs=[dataset_dropdown, subset_dropdown, split_dropdown, dst_dropdown, query_textarea], outputs=[details_accordion, progress_labels, output_markdown])
|
| 148 |
if DRY_RUN:
|
| 149 |
+
dry_run_button.click(dry_run, inputs=[dataset_dropdown, subset_dropdown, split_dropdown, dst_dropdown, query_textarea], outputs=[details_accordion, progress_labels, output_markdown])
|
| 150 |
|
| 151 |
def show_subset_dropdown(dataset: str):
|
| 152 |
if dataset and "/" not in dataset.strip().strip("/"):
|
|
|
|
| 195 |
with demo.route("Help", "/help"):
    # Static page rendering the CLI help text captured in the module-level
    # HELP constant (defined earlier in the file, outside this view).
    gr.Markdown(f"# Help\n\n```\n{HELP}\n```")
|
| 197 |
|
| 198 |
+
with demo.route("Jobs", "/jobs") as page:
    gr.Markdown("# Jobs")
    jobs_dataframe = gr.DataFrame()

    @page.load(outputs=[jobs_dataframe])
    def list_jobs(oauth_token: gr.OAuthToken | None, profile: gr.OAuthProfile | None) -> pd.DataFrame:
        """Fetch the current user's Jobs from the Hugging Face API for display.

        Resolves credentials from the OAuth session when available, otherwise
        falls back to a locally stored token (``get_token()``). Returns a
        DataFrame of this Space's jobs, or a single-column placeholder frame
        prompting the user to log in.
        """
        if oauth_token and profile:
            # Logged in through the Space's OAuth flow.
            token = oauth_token.token
            username = profile.username
        elif (token := get_token()):
            # Local/token-based auth: resolve the username from the token.
            username = HfApi().whoami(token=token)["name"]
        else:
            return pd.DataFrame({"Log in to see jobs": []})
        resp = requests.get(
            f"https://huggingface.co/api/jobs/{username}",
            headers={"Authorization": f"Bearer {token}"},
            timeout=30,  # fix: without a timeout a stalled API call hangs the page load indefinitely
        )
        # fix: fail loudly on HTTP errors instead of letting resp.json() raise
        # a confusing decode error on an HTML/plain-text error body.
        resp.raise_for_status()
        return pd.DataFrame([
            {
                "id": job["metadata"]["id"],
                "created_at": job["metadata"]["created_at"],
                "stage": job["compute"]["status"]["stage"],
                "command": str(job["compute"]["spec"]["extra"]["command"]),
                "args": str(job["compute"]["spec"]["extra"]["args"]),
            }
            for job in resp.json()
            # NOTE(review): `spaceId` is not defined anywhere visible in this
            # diff — confirm it is a module-level constant elsewhere in the
            # file (e.g. read from the SPACE_ID env var), otherwise this is a
            # NameError on first page load.
            if job["compute"]["spec"]["extra"]["input"]["spaceId"] == spaceId
        ])
|
| 226 |
|
| 227 |
if __name__ == "__main__":
    # Bind to all interfaces so the app is reachable from outside the
    # Space/Docker container, not just localhost.
    demo.launch(server_name="0.0.0.0")
|