ladybug11 committed on
Commit
7922c49
·
1 Parent(s): 49a0135
Files changed (1)
  1. app.py +18 -7
app.py CHANGED
@@ -8,7 +8,7 @@ import time
 import shutil
 
 from openai import OpenAI
-from smolagents import CodeAgent, MCPClient, tool, HfApiModel
+from smolagents import CodeAgent, MCPClient, tool
 from huggingface_hub import InferenceClient  # still imported if you need it elsewhere
 from moviepy.editor import VideoFileClip, ImageClip, CompositeVideoClip, AudioFileClip
 from PIL import Image, ImageDraw, ImageFont
@@ -349,13 +349,24 @@ def create_quote_video_tool(video_url: str, quote_text: str, output_path: str, a
 def initialize_agent():
     """Initialize the CodeAgent with MCP capabilities"""
     try:
-        # Use Hugging Face API model via HfApiModel
+        # Use Hugging Face Inference API for the agent's LLM
         hf_token = os.getenv("HF_TOKEN")
-        model_id = os.getenv("HF_MODEL_ID", "meta-llama/Llama-3.3-70B-Instruct")
-        model = HfApiModel(model_id=model_id, token=hf_token)
+        # If you have a specific model, you can set HF_MODEL_ID in your Space secrets
+        hf_model_id = os.getenv("HF_MODEL_ID")  # e.g. "mistralai/Mixtral-8x7B-Instruct-v0.1"
+
+        if hf_model_id:
+            model = InferenceClient(model=hf_model_id, token=hf_token)
+        else:
+            # Fallback: rely on default model configured on the Space / org
+            model = InferenceClient(token=hf_token)
 
         agent = CodeAgent(
-            tools=[generate_quote_tool, search_pexels_video_tool, generate_voice_commentary_tool, create_quote_video_tool],
+            tools=[
+                generate_quote_tool,
+                search_pexels_video_tool,
+                generate_voice_commentary_tool,
+                create_quote_video_tool,
+            ],
             model=model,
             additional_authorized_imports=[
                 "requests",
@@ -364,9 +375,9 @@ def initialize_agent():
                 "tempfile",
                 "os",
                 "google.generativeai",
-                "json"
+                "json",
             ],
-            max_steps=15
+            max_steps=15,
         )
 
         if mcp_enabled:
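
After this change, the agent's LLM backend is chosen from the HF_MODEL_ID environment variable, falling back to the default Inference API endpoint for the token when it is unset. A minimal sketch of how that selection can be smoke-tested on its own, outside the agent (the prompt and token limit are illustrative, not part of the commit; the no-model fallback assumes the default endpoint can resolve a model, as the committed comment suggests):

import os

from huggingface_hub import InferenceClient

hf_token = os.getenv("HF_TOKEN")
hf_model_id = os.getenv("HF_MODEL_ID")  # optional override, e.g. set as a Space secret

# Build the client the same way initialize_agent() now does.
if hf_model_id:
    client = InferenceClient(model=hf_model_id, token=hf_token)
else:
    # Relies on a default model being configured for the Space / org.
    client = InferenceClient(token=hf_token)

# chat_completion is part of huggingface_hub's InferenceClient API; this just
# confirms the configured endpoint answers before wiring the client into CodeAgent.
reply = client.chat_completion(
    messages=[{"role": "user", "content": "Reply with the single word: ready"}],
    max_tokens=8,
)
print(reply.choices[0].message.content)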