Amaranath committed on
Commit
26ff678
Β·
verified Β·
1 Parent(s): 870f943

Update start.sh

Browse files
Files changed (1) hide show
  1. start.sh +17 -27
start.sh CHANGED
@@ -3,50 +3,40 @@ set -e
3
 
4
  echo "πŸš€ Starting Ollama + FastAPI Server..."
5
 
6
- # Set Ollama environment variables
7
- export OLLAMA_HOME=/app/.ollama
 
8
  export OLLAMA_HOST=0.0.0.0:11434
9
  export OLLAMA_ORIGINS="*"
10
- export OLLAMA_KEEP_ALIVE=5m
11
 
 
 
12
 
13
- # Create Ollama directory if it doesn't exist
14
- mkdir -p /app/.ollama
15
- chmod 755 /app/.ollama
16
-
17
- # Start Ollama in background
18
  echo "πŸ“‘ Starting Ollama service..."
19
- nohup ollama serve > /app/ollama.log 2>&1 &
20
  OLLAMA_PID=$!
21
 
22
- # Wait for Ollama to be ready
23
  echo "⏳ Waiting for Ollama to be ready..."
24
- for i in {1..60}; do # Increased timeout to 60 attempts
25
  if curl -f http://127.0.0.1:11434/api/tags >/dev/null 2>&1; then
26
  echo "βœ… Ollama is ready!"
27
  break
28
  fi
29
- echo "πŸ”„ Attempt $i/60 - Waiting for Ollama..."
30
- sleep 2
31
  done
32
 
33
-
34
- # Check if Ollama started successfully
35
  if curl -f http://127.0.0.1:11434/api/tags >/dev/null 2>&1; then
36
- echo "βœ… Ollama is running!"
 
 
 
 
37
 
38
- # Try to pull model in background (don't block startup)
39
- echo "πŸ“₯ Starting model download in background..."
40
- nohup ollama pull llama3.2:1b > /app/pull.log 2>&1 &
41
- echo "πŸ“ Model download started - check /app/pull.log for progress"
42
  else
43
- echo "⚠️ Ollama not responding - check /app/ollama.log for details"
44
  fi
45
 
46
- echo "πŸŽ‰ Setup complete!"
47
- echo "πŸ“‘ Ollama API: http://localhost:11434"
48
- echo "πŸ“– FastAPI: http://localhost:7860"
49
-
50
- # Start FastAPI in foreground (keeps container alive)
51
  echo "πŸš€ Starting FastAPI server..."
52
- python app.py
 
3
 
4
  echo "πŸš€ Starting Ollama + FastAPI Server..."
5
 
6
+ # Use /tmp directory (always writable, no chmod needed)
7
+ export OLLAMA_HOME=/tmp/ollama
8
+ export OLLAMA_MODELS=/tmp/ollama/models
9
  export OLLAMA_HOST=0.0.0.0:11434
10
  export OLLAMA_ORIGINS="*"
 
11
 
12
+ # Create directories in /tmp (no chmod needed - /tmp is always writable)
13
+ mkdir -p /tmp/ollama/models
14
 
 
 
 
 
 
15
  echo "πŸ“‘ Starting Ollama service..."
16
+ ollama serve > /tmp/ollama.log 2>&1 &
17
  OLLAMA_PID=$!
18
 
 
19
  echo "⏳ Waiting for Ollama to be ready..."
20
+ for i in {1..45}; do
21
  if curl -f http://127.0.0.1:11434/api/tags >/dev/null 2>&1; then
22
  echo "βœ… Ollama is ready!"
23
  break
24
  fi
25
+ echo "πŸ”„ Attempt $i/45 - Waiting for Ollama..."
26
+ sleep 3
27
  done
28
 
29
+ # Check final status
 
30
  if curl -f http://127.0.0.1:11434/api/tags >/dev/null 2>&1; then
31
+ echo "πŸŽ‰ Ollama API is responding!"
32
+
33
+ # Download model in background
34
+ echo "πŸ“₯ Downloading Llama 3.2 1B model..."
35
+ (ollama pull llama3.2:1b > /tmp/pull.log 2>&1 && echo "βœ… Model ready!") &
36
 
 
 
 
 
37
  else
38
+ echo "⚠️ Ollama not ready, check logs at /tmp/ollama.log"
39
  fi
40
 
 
 
 
 
 
41
  echo "πŸš€ Starting FastAPI server..."
42
+ python app.py