Spaces: Sleeping
# Standard library first, then third-party (PEP 8 grouping).
import json

import gradio as gr
import requests
def normalize_model_id(raw_id):
    """Normalize a user-supplied model reference to plain ``org/name`` form.

    Accepts bare IDs ("org/name"), host-prefixed IDs ("hf.co/org/name",
    "huggingface.co/org/name"), and full URLs ("https://huggingface.co/org/name").
    The original replace-chain broke on scheme-prefixed URLs (it produced
    "https/org/name"); stripping the scheme and host as *prefixes* fixes that.
    """
    model_id = raw_id.strip()
    for scheme in ("https://", "http://"):
        model_id = model_id.removeprefix(scheme)
    for host in ("huggingface.co", "hf.co"):
        model_id = model_id.removeprefix(host)
    return model_id.strip("/")


def _find_layer(manifest_data, media_type):
    """Return the first manifest layer matching *media_type*, or None."""
    return next(
        (layer for layer in manifest_data["layers"] if layer["mediaType"] == media_type),
        None,
    )


def _fetch_blob(model_id, digest):
    """GET a registry blob by digest; return the response (raises on HTTP error)."""
    blob_url = f"https://huggingface.co/v2/{model_id}/blobs/{digest}"
    # timeout so a stalled registry can't hang the Space indefinitely
    response = requests.get(blob_url, timeout=30)
    response.raise_for_status()
    return response


def fetch_manifest(model_id, tag_name):
    """Fetch an Ollama-style manifest for a Hugging Face model and format a report.

    Args:
        model_id: Model reference in any form accepted by normalize_model_id.
        tag_name: Manifest tag (e.g. "latest").

    Returns:
        A text report with the raw manifest JSON plus, when present, the
        system message, chat template, and parameters blobs.

    Raises:
        gr.Error: On any network/HTTP failure or unexpected error, so the
            message surfaces in the Gradio UI.
    """
    try:
        model_id = normalize_model_id(model_id)

        # Fetch the manifest itself (bounded by a timeout).
        manifest_url = f"https://huggingface.co/v2/{model_id}/manifests/{tag_name}"
        manifest_response = requests.get(manifest_url, timeout=30)
        manifest_response.raise_for_status()
        manifest_data = manifest_response.json()

        output = f"Manifest for {model_id}:{tag_name}\n"
        output += json.dumps(manifest_data, indent=2) + "\n"

        # Locate the optional Ollama layers by media type.
        system_layer = _find_layer(manifest_data, "application/vnd.ollama.image.system")
        template_layer = _find_layer(manifest_data, "application/vnd.ollama.image.template")
        params_layer = _find_layer(manifest_data, "application/vnd.ollama.image.params")

        if system_layer:
            blob = _fetch_blob(model_id, system_layer["digest"])
            output += "\n\n======================\n\nSystem message:\n"
            output += blob.text + "\n"

        if template_layer:
            blob = _fetch_blob(model_id, template_layer["digest"])
            output += "\n\n======================\n\nTemplate:\n"
            output += blob.text + "\n"

        if params_layer:
            blob = _fetch_blob(model_id, params_layer["digest"])
            output += "\n\n======================\n\nParameters:\n"
            output += json.dumps(blob.json(), indent=2) + "\n"

        return output
    except requests.exceptions.RequestException as e:
        raise gr.Error(f"Error occurred: {str(e)}")
    except Exception as e:
        raise gr.Error(f"Unexpected error: {str(e)}")
# Assemble the Gradio UI: two text inputs feeding fetch_manifest, one code pane out.
model_input = gr.Textbox(
    label="Model ID (e.g., bartowski/Meta-Llama-3.1-8B-Instruct-GGUF)",
    placeholder="Enter model ID",
)
tag_input = gr.Textbox(
    "latest",  # pre-filled default tag
    label="Tag Name (e.g., latest)",
    placeholder="Enter tag name",
)

iface = gr.Interface(
    fn=fetch_manifest,
    inputs=[model_input, tag_input],
    outputs=gr.Code(label="Results", lines=40),
    title="Hugging Face Model Manifest Viewer",
    description="Enter a Hugging Face model ID and tag name to view its manifest, template, and parameters.",
)

iface.launch()