Spaces:
Running
Running
Commit
·
8456bb8
1
Parent(s):
5fb65b7
docker changes
Browse files
- requirements.txt +2 -1
- start.sh +2 -3
requirements.txt
CHANGED
|
@@ -2,4 +2,5 @@ fastapi
|
|
| 2 |
uvicorn[standard]
|
| 3 |
langchain-ollama
|
| 4 |
langgraph
|
| 5 |
-
pygsheets
|
|
|
|
|
|
| 2 |
uvicorn[standard]
|
| 3 |
langchain-ollama
|
| 4 |
langgraph
|
| 5 |
+
pygsheets
|
| 6 |
+
pandas
|
start.sh
CHANGED
|
@@ -13,7 +13,7 @@ export CUDA_VISIBLE_DEVICES=-1
|
|
| 13 |
ollama serve &
|
| 14 |
|
| 15 |
# Pull the model if not already present
|
| 16 |
-
echo "
|
| 17 |
if ! ollama list | grep -q "gemma3:4b"; then
|
| 18 |
ollama pull gemma3:4b
|
| 19 |
fi
|
|
@@ -30,7 +30,6 @@ while ! curl -s http://localhost:11434/api/tags >/dev/null; do
|
|
| 30 |
fi
|
| 31 |
done
|
| 32 |
|
| 33 |
-
echo "Ollama is Ready -
|
| 34 |
|
| 35 |
-
# Démarrer Application
|
| 36 |
python app.py
|
|
|
|
| 13 |
ollama serve &
|
| 14 |
|
| 15 |
# Pull the model if not already present
|
| 16 |
+
echo "gemma3:4b will be download"
|
| 17 |
if ! ollama list | grep -q "gemma3:4b"; then
|
| 18 |
ollama pull gemma3:4b
|
| 19 |
fi
|
|
|
|
| 30 |
fi
|
| 31 |
done
|
| 32 |
|
| 33 |
+
echo "Ollama is Ready - gemma3:4b is Loaded"
|
| 34 |
|
|
|
|
| 35 |
python app.py
|