# ling-series-spaces / config.py
"""
Configuration file for the Ling Spaces application.
This file centralizes all the configuration variables, such as API endpoints,
API keys, and system prompts for different functionalities.
"""
import os
from dotenv import load_dotenv
# Load environment variables from .secrets file
load_dotenv(dotenv_path='.secrets')
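# A minimal example of the expected `.secrets` file. The variable names come from
# the lookups below; the values shown are placeholders, not real credentials, and
# the entrypoint URL depends on which OpenAI-compatible provider you use:
#
#   OPEN_AI_ENTRYPOINT=https://api.openai.com/v1
#   OPEN_AI_KEY=sk-...
#   OPEN_AI_PROVIDER=OpenAI Compatible API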
# --- API Configuration ---
# API endpoint for OpenAI compatible services
OPEN_AI_ENTRYPOINT = os.getenv("OPEN_AI_ENTRYPOINT") or "https://api.openai.com/v1"
# API key for OpenAI compatible services
OPEN_AI_KEY = os.getenv("OPEN_AI_KEY")
# Brand name of the OpenAI compatible provider
OPEN_AI_PROVIDER = os.getenv("OPEN_AI_PROVIDER") or "OpenAI Compatible API"
# Fallback/warning for API keys
if not OPEN_AI_KEY:
    print("⚠️ Warning: OPEN_AI_KEY is not set. Remote models may not function correctly.")
# Check the raw environment variable here: OPEN_AI_ENTRYPOINT itself always holds
# the fallback value, so testing it directly would never trigger the warning.
if not os.getenv("OPEN_AI_ENTRYPOINT"):
    print("⚠️ Warning: OPEN_AI_ENTRYPOINT is not set. Using default: https://api.openai.com/v1")
# --- Model Specifications ---
# Constants for easy referencing of models
LING_MINI_2_0 = "ling-mini-2.0"
LING_1T = "ling-1t"
LING_FLASH_2_0 = "ling-flash-2.0"
RING_1T = "ring-1t"
RING_FLASH_2_0 = "ring-flash-2.0"
RING_MINI_2_0 = "ring-mini-2.0"
CHAT_MODEL_SPECS = {
    LING_MINI_2_0: {
        "provider": "openai_compatible",
        "model_id": "inclusionai/ling-mini-2.0",
        "display_name": "🦉 Ling-mini-2.0",
        "description": "A lightweight chat model optimized to run efficiently on consumer-grade hardware, making it an ideal choice for mobile or on-premises deployment scenarios.",
        "url": "https://huggingface.co/inclusionai"
    },
    LING_1T: {
        "provider": "openai_compatible",
        "model_id": "inclusionai/ling-1t",
        "display_name": "🦉 Ling-1T",
        "description": "A trillion-parameter large language model designed for complex natural language understanding and generation tasks that demand top-tier performance and fluency.",
        "url": "https://huggingface.co/inclusionai"
    },
    LING_FLASH_2_0: {
        "provider": "openai_compatible",
        "model_id": "inclusionai/ling-flash-2.0",
        "display_name": "🦉 Ling-flash-2.0",
        "description": "A high-performance billion-parameter model optimized for scenarios that require fast responses and complex instruction following.",
        "url": "https://huggingface.co/inclusionai"
    },
    RING_1T: {
        "provider": "openai_compatible",
        "model_id": "inclusionai/ring-1t",
        "display_name": "💍️ Ring-1T",
        "description": "A brand-new trillion-parameter reasoning model with strong code generation and tool-use capabilities.",
        "url": "https://huggingface.co/inclusionai"
    },
    RING_FLASH_2_0: {
        "provider": "openai_compatible",
        "model_id": "inclusionai/ring-flash-2.0",
        "display_name": "💍️ Ring-flash-2.0",
        "description": "A billion-parameter reasoning model that strikes a good balance between performance and cost, suited to general tasks requiring step-by-step thinking or code generation.",
        "url": "https://huggingface.co/inclusionai"
    },
    RING_MINI_2_0: {
        "provider": "openai_compatible",
        "model_id": "inclusionai/ring-mini-2.0",
        "display_name": "💍️ Ring-mini-2.0",
        "description": "A quantized, highly efficient reasoning model built for resource-constrained environments with strict speed and efficiency requirements (e.g. edge computing).",
        "url": "https://huggingface.co/inclusionai"
    }
}
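# Illustrative sketch (not used elsewhere in this file): one way the specs above
# could be wired to an OpenAI-compatible endpoint. This assumes the `openai`
# Python SDK (v1+) is installed; the actual application may construct and reuse
# its client differently. The helper name is hypothetical.
def _example_chat_completion(model_constant: str, prompt: str) -> str:
    from openai import OpenAI  # imported lazily so importing config has no hard dependency

    client = OpenAI(base_url=OPEN_AI_ENTRYPOINT, api_key=OPEN_AI_KEY)
    spec = CHAT_MODEL_SPECS[model_constant]
    response = client.chat.completions.create(
        model=spec["model_id"],
        messages=[{"role": "user", "content": prompt}],
    )
    return response.choices[0].message.content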
# --- Code Framework Specifications ---
# Constants for easy referencing of code frameworks
STATIC_PAGE = "static_page"
GRADIO_APP = "gradio_app"
CODE_FRAMEWORK_SPECS = {
    STATIC_PAGE: {
        "display_name": "Static Page",
        "description": "Generates a standalone, responsive HTML file containing all necessary CSS and JavaScript. Suitable for quick prototypes and simple web page demos."
    }
}
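# Illustrative sketch: the spec dicts above carry display names, so a UI layer can
# build its selector options directly from them. This assumes the selectors accept
# (label, value) pairs (as Gradio's Dropdown `choices` does); the actual app code
# may build its choices differently, and this helper name is hypothetical.
def _example_dropdown_choices() -> list[tuple[str, str]]:
    model_choices = [
        (spec["display_name"], constant)
        for constant, spec in CHAT_MODEL_SPECS.items()
    ]
    framework_choices = [
        (spec["display_name"], constant)
        for constant, spec in CODE_FRAMEWORK_SPECS.items()
    ]
    return model_choices + framework_choices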
# --- Utility Functions ---
_current_provider_name = OPEN_AI_PROVIDER
def set_current_provider(provider_name: str):
    """Sets the current API provider name."""
    global _current_provider_name
    _current_provider_name = provider_name

def get_current_provider_name() -> str:
    """Returns the current API provider name."""
    return _current_provider_name
def get_model_id(model_constant: str) -> str:
    """
    Retrieves the internal model ID for a given model constant.
    This is typically what's passed to the underlying API.
    """
    return CHAT_MODEL_SPECS.get(model_constant, {}).get("model_id", model_constant)

def get_model_display_name(model_constant: str) -> str:
    """
    Retrieves the display name for a given model constant.
    This is what's shown in the UI.
    """
    return CHAT_MODEL_SPECS.get(model_constant, {}).get("display_name", model_constant)