Amaranath committed on
Commit
0c64de0
·
verified ·
1 Parent(s): a9fe5b6

Update start.sh

Browse files
Files changed (1) hide show
  1. start.sh +37 -21
start.sh CHANGED
@@ -3,40 +3,56 @@ set -e
3
 
4
  echo "🚀 Starting Ollama + FastAPI Server..."
5
 
6
- # Use /tmp directory (always writable, no chmod needed)
7
  export OLLAMA_HOME=/tmp/ollama
8
- export OLLAMA_MODELS=/tmp/ollama/models
9
  export OLLAMA_HOST=0.0.0.0:11434
10
  export OLLAMA_ORIGINS="*"
11
 
12
- # Create directories in /tmp (no chmod needed - /tmp is always writable)
13
- mkdir -p /tmp/ollama/models
14
 
15
- echo "📑 Starting Ollama service..."
 
 
 
 
 
 
 
 
16
  ollama serve > /tmp/ollama.log 2>&1 &
17
  OLLAMA_PID=$!
 
 
 
 
 
 
 
 
 
 
 
 
 
18
 
19
- echo "⏳ Waiting for Ollama to be ready..."
20
- for i in {1..45}; do
 
 
 
 
 
21
  if curl -f http://127.0.0.1:11434/api/tags >/dev/null 2>&1; then
22
  echo "✅ Ollama is ready!"
23
  break
 
 
 
 
24
  fi
25
- echo "🔄 Attempt $i/45 - Waiting for Ollama..."
26
  sleep 3
27
  done
28
 
29
- # Check final status
30
- if curl -f http://127.0.0.1:11434/api/tags >/dev/null 2>&1; then
31
- echo "🎉 Ollama API is responding!"
32
-
33
- # Download model in background
34
- echo "📥 Downloading Llama 3.2 1B model..."
35
- (ollama pull llama3.2:1b > /tmp/pull.log 2>&1 && echo "✅ Model ready!") &
36
-
37
- else
38
- echo "⚠️ Ollama not ready, check logs at /tmp/ollama.log"
39
- fi
40
-
41
- echo "🚀 Starting FastAPI server..."
42
  python app.py
 
3
 
4
  echo "🚀 Starting Ollama + FastAPI Server..."
5
 
6
+ # Set environment
7
  export OLLAMA_HOME=/tmp/ollama
 
8
  export OLLAMA_HOST=0.0.0.0:11434
9
  export OLLAMA_ORIGINS="*"
10
 
11
+ # Create directory
12
+ mkdir -p /tmp/ollama
13
 
14
+ echo "πŸ” Debugging Ollama startup..."
15
+ echo "πŸ“ Ollama location: $(which ollama)"
16
+ echo "πŸ“ Ollama version: $(ollama --version || echo 'Version check failed')"
17
+ echo "πŸ“ Environment:"
18
+ echo " OLLAMA_HOME=$OLLAMA_HOME"
19
+ echo " OLLAMA_HOST=$OLLAMA_HOST"
20
+
21
+ # Try to start Ollama with verbose output
22
+ echo "📑 Starting Ollama service with debug output..."
23
  ollama serve > /tmp/ollama.log 2>&1 &
24
  OLLAMA_PID=$!
25
+ echo "πŸ“ Ollama PID: $OLLAMA_PID"
26
+
27
+ # Give it more time and check process
28
+ sleep 5
29
+ echo "πŸ” Checking Ollama process..."
30
+ if ps -p $OLLAMA_PID > /dev/null 2>&1; then
31
+ echo "✅ Ollama process is running"
32
+ else
33
+ echo "❌ Ollama process died, checking logs..."
34
+ echo "--- /tmp/ollama.log contents ---"
35
+ cat /tmp/ollama.log || echo "No log file found"
36
+ echo "--- End of logs ---"
37
+ fi
38
 
39
+ # Check what's listening on port 11434
40
+ echo "πŸ” Checking port 11434..."
41
+ netstat -ln | grep 11434 || echo "No process listening on 11434"
42
+
43
+ # Try connection test
44
+ echo "⏳ Testing Ollama connection..."
45
+ for i in {1..10}; do
46
  if curl -f http://127.0.0.1:11434/api/tags >/dev/null 2>&1; then
47
  echo "✅ Ollama is ready!"
48
  break
49
+ else
50
+ echo "🔄 Attempt $i/10 - Connection failed"
51
+ # Show what curl actually returns
52
+ curl -v http://127.0.0.1:11434/api/tags 2>&1 || true
53
  fi
 
54
  sleep 3
55
  done
56
 
57
+ echo "🚀 Starting FastAPI anyway..."
 
 
 
 
 
 
 
 
 
 
 
 
58
  python app.py