Spaces:
Running
Running
add trending models and spaces
Browse files- anycoder_app/deploy.py +228 -0
- anycoder_app/ui.py +154 -1
anycoder_app/deploy.py
CHANGED
|
@@ -1823,6 +1823,234 @@ def _generate_gradio_app_from_diffusers(repo_id: str) -> str:
|
|
| 1823 |
" demo.launch()\n"
|
| 1824 |
)
|
| 1825 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1826 |
def import_repo_to_app(url: str, framework: str = "Gradio") -> Tuple[str, str, str]:
|
| 1827 |
"""Import a GitHub or HF model repo and return the raw code snippet from README/model card.
|
| 1828 |
|
|
|
|
| 1823 |
" demo.launch()\n"
|
| 1824 |
)
|
| 1825 |
|
| 1826 |
+
def get_trending_models(limit: int = 10) -> List[Tuple[str, str]]:
    """
    Fetch top trending models from HuggingFace Hub.

    Args:
        limit: Maximum number of models to return.

    Returns:
        A list of (display_name, model_id) tuples suitable for a dropdown,
        where the display name is "model_id (Task)". On failure, a single
        placeholder tuple with an empty model_id is returned.
    """
    try:
        # Hit the trending endpoint directly. The timeout is a fix: the
        # original call had none, so a slow/unreachable Hub could hang
        # app startup indefinitely.
        response = requests.get("https://huggingface.co/api/trending", timeout=10)

        if response.status_code != 200:
            print(f"Failed to fetch trending models: HTTP {response.status_code}")
            return [("Unable to load trending models", "")]

        # The API returns {"recentlyTrending": [...]}
        recently_trending = response.json().get("recentlyTrending", [])

        if not recently_trending:
            print("No trending items found in API response")
            return [("No trending models available", "")]

        trending_list = []

        # The feed mixes repo types (models/spaces/datasets); keep only
        # models, up to `limit`.
        for item in recently_trending:
            if len(trending_list) >= limit:
                break
            try:
                if item.get("repoType") != "model":
                    continue
                repo_data = item.get("repoData", {})
                model_id = repo_data.get("id")
                if not model_id:
                    continue
                trending_list.append(
                    _format_model_choice(model_id, repo_data.get("pipeline_tag"))
                )
            except Exception as model_error:
                print(f"Error processing trending item: {model_error}")
                continue

        if not trending_list:
            print("No models found in trending list, using fallback")
            try:
                return _most_downloaded_models(limit)
            except Exception as fallback_error:
                print(f"Fallback also failed: {fallback_error}")
                return [("No models available", "")]

        return trending_list

    except Exception as e:
        print(f"Error fetching trending models: {e}")
        # Network/JSON failure: fall back to the most-downloaded models.
        try:
            return _most_downloaded_models(limit)
        except Exception:
            return [("Error loading models", "")]


def _format_model_choice(model_id: str, pipeline_tag) -> Tuple[str, str]:
    """Build a ("model_id (Task)", model_id) dropdown choice from a pipeline tag."""
    task = pipeline_tag or "general"
    task_display = task.replace("-", " ").title() if task != "general" else "General"
    return (f"{model_id} ({task_display})", model_id)


def _most_downloaded_models(limit: int) -> List[Tuple[str, str]]:
    """Fallback listing: most-downloaded models via HfApi. Propagates API errors."""
    api = HfApi()
    return [
        _format_model_choice(model.id, getattr(model, "pipeline_tag", None))
        for model in api.list_models(sort="downloads", limit=limit)
    ]
|
| 1923 |
+
|
| 1924 |
+
|
| 1925 |
+
def get_trending_spaces(limit: int = 10) -> List[Tuple[str, str]]:
    """
    Fetch top trending spaces from HuggingFace Hub.

    Args:
        limit: Maximum number of spaces to return.

    Returns:
        A list of (display_name, space_id) tuples; the display name format
        is "title (category)". On failure, a single placeholder tuple with
        an empty space_id is returned.
    """
    try:
        # Timeout added (fix): an unbounded request could hang the UI on
        # startup when the Hub is slow or unreachable.
        response = requests.get(
            "https://huggingface.co/api/trending?type=space", timeout=10
        )

        if response.status_code != 200:
            print(f"Failed to fetch trending spaces: HTTP {response.status_code}")
            return [("Unable to load trending spaces", "")]

        # The API returns {"recentlyTrending": [...]}
        recently_trending = response.json().get("recentlyTrending", [])

        if not recently_trending:
            print("No trending spaces found in API response")
            return [("No trending spaces available", "")]

        trending_list = []

        # Keep only space repos, up to `limit`.
        for item in recently_trending:
            if len(trending_list) >= limit:
                break
            try:
                if item.get("repoType") != "space":
                    continue
                repo_data = item.get("repoData", {})
                space_id = repo_data.get("id")
                if not space_id:
                    continue

                title = repo_data.get("title") or space_id
                category = repo_data.get("ai_category") or repo_data.get("shortDescription", "Space")

                # Truncate long titles so the dropdown stays readable.
                if len(title) > 40:
                    title = title[:37] + "..."

                trending_list.append((f"{title} ({category})", space_id))
            except Exception as space_error:
                print(f"Error processing trending space: {space_error}")
                continue

        if not trending_list:
            return [("No spaces available", "")]

        return trending_list

    except Exception as e:
        print(f"Error fetching trending spaces: {e}")
        return [("Error loading spaces", "")]
|
| 1995 |
+
|
| 1996 |
+
|
| 1997 |
+
def import_space_from_hf(space_id: str) -> Tuple[str, str, str, str]:
    """
    Import a HuggingFace space by ID and extract its code.

    Args:
        space_id: Space repo id, e.g. "user/my-space".

    Returns:
        (status, code, language, space_url) where `language` is the value
        for the framework dropdown: "gradio", "streamlit",
        "transformers.js", or "html".
    """
    if not space_id:
        # `not space_id` already covers "" and None (the original also
        # compared against "" redundantly).
        return "Please select a space.", "", "html", ""

    space_url = f"https://huggingface.co/spaces/{space_id}"

    # Reuse the generic project loader to pull the space's source code.
    status, code = load_project_from_url(space_url)

    # Infer the framework from the code content; default to plain HTML.
    # (Fix: the original also tracked a `code_lang` variable that was never
    # used or returned — removed as dead code.)
    language = "html"
    if is_streamlit_code(code):
        language = "streamlit"
    elif is_gradio_code(code):
        language = "gradio"
    elif "=== index.html ===" in code and "=== index.js ===" in code:
        language = "transformers.js"
    elif ("import " in code or "def " in code) and not (
        "<!DOCTYPE html>" in code or "<html" in code
    ):
        # Python-looking source with no HTML markers: assume a Gradio app.
        language = "gradio"

    return status, code, language, space_url
|
| 2031 |
+
|
| 2032 |
+
|
| 2033 |
+
def import_model_from_hf(model_id: str) -> Tuple[str, str, str, str]:
    """
    Import a HuggingFace model by ID and extract a code snippet for it.

    Args:
        model_id: Model repo id, e.g. "org/model-name".

    Returns:
        (status, code, language, model_url); `language` is always "gradio",
        the default framework for model demos.
    """
    if not model_id or model_id == "":
        return "Please select a model.", "", "python", ""

    model_url = f"https://huggingface.co/{model_id}"

    # Delegate README/model-card scraping to the existing repo importer.
    status, code, _ = import_repo_to_app(model_url)

    # Model imports always target a Python/Gradio demo.
    return status, code, "gradio", model_url
|
| 2052 |
+
|
| 2053 |
+
|
| 2054 |
def import_repo_to_app(url: str, framework: str = "Gradio") -> Tuple[str, str, str]:
|
| 2055 |
"""Import a GitHub or HF model repo and return the raw code snippet from README/model card.
|
| 2056 |
|
anycoder_app/ui.py
CHANGED
|
@@ -26,7 +26,8 @@ from .deploy import (
|
|
| 26 |
generation_code, deploy_to_spaces, add_anycoder_tag_to_readme,
|
| 27 |
_parse_repo_or_model_url, load_project_from_url, check_hf_space_url,
|
| 28 |
import_repo_to_app, extract_import_statements,
|
| 29 |
-
generate_requirements_txt_with_llm, prettify_comfyui_json_for_html
|
|
|
|
| 30 |
)
|
| 31 |
|
| 32 |
# Main application with proper Gradio theming
|
|
@@ -116,6 +117,26 @@ with gr.Blocks(
|
|
| 116 |
load_project_btn = gr.Button("π₯ Import Project", variant="secondary", size="sm", visible=True)
|
| 117 |
load_project_status = gr.Markdown(visible=False)
|
| 118 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 119 |
# Chat history display in sidebar
|
| 120 |
chat_history = gr.Chatbot(
|
| 121 |
label="Conversation History",
|
|
@@ -1747,4 +1768,136 @@ CMD ["streamlit", "run", "streamlit_app.py", "--server.port=7860", "--server.add
|
|
| 1747 |
outputs=[input, btn],
|
| 1748 |
queue=False
|
| 1749 |
)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1750 |
|
|
|
|
| 26 |
generation_code, deploy_to_spaces, add_anycoder_tag_to_readme,
|
| 27 |
_parse_repo_or_model_url, load_project_from_url, check_hf_space_url,
|
| 28 |
import_repo_to_app, extract_import_statements,
|
| 29 |
+
generate_requirements_txt_with_llm, prettify_comfyui_json_for_html,
|
| 30 |
+
get_trending_models, import_model_from_hf, get_trending_spaces, import_space_from_hf
|
| 31 |
)
|
| 32 |
|
| 33 |
# Main application with proper Gradio theming
|
|
|
|
| 117 |
load_project_btn = gr.Button("π₯ Import Project", variant="secondary", size="sm", visible=True)
|
| 118 |
load_project_status = gr.Markdown(visible=False)
|
| 119 |
|
| 120 |
+
# Sidebar: trending models & spaces pickers (choices populated by demo.load
# handlers at startup; see the wiring further down the file).
# NOTE(review): the labels' emoji were mojibake in the reviewed copy
# ("π₯", "π") — restored here as 🔥/🚀; confirm against the live UI.
trending_models_dropdown = gr.Dropdown(
    label="🔥 Trending HuggingFace Models",
    choices=[],  # Will be populated on load
    value=None,
    interactive=True,
    visible=True
)
trending_models_status = gr.Markdown(visible=False)

# Trending HuggingFace Spaces section
trending_spaces_dropdown = gr.Dropdown(
    label="🚀 Trending HuggingFace Spaces",
    choices=[],  # Will be populated on load
    value=None,
    interactive=True,
    visible=True
)
trending_spaces_status = gr.Markdown(visible=False)
|
| 139 |
+
|
| 140 |
# Chat history display in sidebar
|
| 141 |
chat_history = gr.Chatbot(
|
| 142 |
label="Conversation History",
|
|
|
|
| 1768 |
outputs=[input, btn],
|
| 1769 |
queue=False
|
| 1770 |
)
|
| 1771 |
+
|
| 1772 |
+
# Populate the trending-models dropdown once, when the app starts.
def load_trending_models():
    """Load trending models from HuggingFace Hub into the dropdown."""
    # get_trending_models already returns (display, value) tuples — exactly
    # the shape gr.Dropdown choices expect. The original rebuilt the list
    # with an identity comprehension; that copy was pure overhead.
    return gr.update(choices=get_trending_models(limit=10))

demo.load(
    load_trending_models,
    inputs=[],
    outputs=[trending_models_dropdown],
    queue=False
)
|
| 1786 |
+
|
| 1787 |
+
# Populate the trending-spaces dropdown once, when the app starts.
def load_trending_spaces():
    """Load trending spaces from HuggingFace Hub into the dropdown."""
    # get_trending_spaces already returns (display, value) tuples; the
    # original's identity comprehension copy was unnecessary.
    return gr.update(choices=get_trending_spaces(limit=10))

demo.load(
    load_trending_spaces,
    inputs=[],
    outputs=[trending_spaces_dropdown],
    queue=False
)
|
| 1801 |
+
|
| 1802 |
+
# Import a trending model into the editor when one is picked from the dropdown.
def handle_trending_model_selection(model_id, hist):
    """Import the selected trending model and load its snippet into the editor.

    Returns updates for: status, code_output, language_dropdown, history,
    history_output, chat_history, and the dropdown itself (reset to None).
    """
    current_msgs = history_to_chatbot_messages(hist)

    if not model_id or model_id == "":
        # No selection — this branch also fires when the dropdown is reset
        # to None after a successful import.
        return [
            gr.update(value="Please select a model.", visible=True),  # status
            gr.update(),           # code_output unchanged
            gr.update(),           # language_dropdown unchanged
            hist,                  # history
            current_msgs,          # history_output
            current_msgs,          # chat_history
            gr.update(value=None)  # reset dropdown
        ]

    # Pull a code snippet for the model via the shared importer.
    status, code, language, _model_url = import_model_from_hf(model_id)

    # Record the import in the conversation history.
    loaded_history = hist + [[f"Imported model: {model_id}", code]]
    loaded_msgs = history_to_chatbot_messages(loaded_history)

    return [
        gr.update(value=status, visible=True),     # status
        gr.update(value=code, language="python"),  # code_output (model demos are Python)
        gr.update(value=language),                 # language_dropdown
        loaded_history,                            # history
        loaded_msgs,                               # history_output
        loaded_msgs,                               # chat_history
        gr.update(value=None)                      # reset dropdown
    ]

trending_models_dropdown.change(
    handle_trending_model_selection,
    inputs=[trending_models_dropdown, history],
    outputs=[
        trending_models_status,
        code_output,
        language_dropdown,
        history,
        history_output,
        chat_history,
        trending_models_dropdown
    ]
)
|
| 1848 |
+
|
| 1849 |
+
# Import a trending space into the editor when one is picked from the dropdown.
def handle_trending_space_selection(space_id, hist):
    """Import the selected trending space and load its code into the editor.

    Returns updates for: status, code_output, language_dropdown, history,
    history_output, chat_history, deploy_btn, and the dropdown (reset).
    """
    current_msgs = history_to_chatbot_messages(hist)

    if not space_id or space_id == "":
        # No selection — also fires when the dropdown is reset below.
        return [
            gr.update(value="Please select a space.", visible=True),  # status
            gr.update(),             # code_output unchanged
            gr.update(),             # language_dropdown unchanged
            hist,                    # history
            current_msgs,            # history_output
            current_msgs,            # chat_history
            gr.update(visible=True), # deploy_btn
            gr.update(value=None)    # reset dropdown
        ]

    # Pull the space's source via the shared importer.
    status, code, language, _space_url = import_space_from_hf(space_id)

    # Record the import in the conversation history.
    loaded_history = hist + [[f"Imported space: {space_id}", code]]
    loaded_msgs = history_to_chatbot_messages(loaded_history)

    # Editor syntax mode: Python for the Python frameworks, HTML otherwise
    # (covers both "transformers.js" bundles and plain HTML spaces).
    code_lang = "python" if language in ("gradio", "streamlit") else "html"

    return [
        gr.update(value=status, visible=True),      # status
        gr.update(value=code, language=code_lang),  # code_output
        gr.update(value=language),                  # language_dropdown
        loaded_history,                             # history
        loaded_msgs,                                # history_output
        loaded_msgs,                                # chat_history
        gr.update(value="Publish", visible=True),   # deploy_btn
        gr.update(value=None)                       # reset dropdown
    ]

trending_spaces_dropdown.change(
    handle_trending_space_selection,
    inputs=[trending_spaces_dropdown, history],
    outputs=[
        trending_spaces_status,
        code_output,
        language_dropdown,
        history,
        history_output,
        chat_history,
        deploy_btn,
        trending_spaces_dropdown
    ]
)
|
| 1903 |
|