fugthchat commited on
Commit
9611d3b
·
verified ·
1 Parent(s): 7a5ce20

Update Dockerfile

Browse files
Files changed (1) hide show
  1. Dockerfile +6 -25
Dockerfile CHANGED
@@ -1,31 +1,12 @@
1
- # Use a standard Python slim image
2
  FROM python:3.10-slim
3
 
4
- # Set the working directory in the container
5
- WORKDIR /code
6
 
7
- # Set environment variable to build llama-cpp-python for CPU only
8
- # This avoids errors on Hugging Face's CPU infrastructure
9
- ENV CMAKE_ARGS="-DLLAMA_CUBLAS=OFF -DLLAMA_CUDA_F16=OFF -DLLAMA_HIPBLAS=OFF -DLLAMA_METAL=OFF"
10
- ENV FORCE_CMAKE=1
11
 
12
- # --- START OF FIX ---
13
- # Install build-essential (for gcc, g++) and cmake before installing python packages
14
- RUN apt-get update && apt-get install -y build-essential cmake
15
- # --- END OF FIX ---
16
 
17
- # Copy the requirements file into the container
18
- COPY ./requirements.txt /code/requirements.txt
19
-
20
- # Install the Python dependencies
21
- RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
22
-
23
- # Copy the rest of the application code
24
- COPY ./app.py /code/app.py
25
-
26
- # Expose the port the app runs on (Hugging Face default is 7860)
27
  EXPOSE 7860
28
-
29
- # Command to run the application using uvicorn
30
- # The app will be available at http://0.0.0.0:7860
31
- CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
 
 
1
# syntax=docker/dockerfile:1
FROM python:3.10-slim

# llama-cpp-python publishes no prebuilt manylinux wheels on PyPI, so pip must
# compile it from source: a C/C++ toolchain and cmake are required (the slim
# base ships neither — removing them in a previous revision breaks the build).
# Install build deps, build the Python packages, then purge the toolchain in
# the SAME layer so it never bloats the final image.
# git/wget are kept for runtime use — assumed needed by app.py for fetching
# models/repos; TODO confirm against the application code.
RUN apt-get update \
    && apt-get install -y --no-install-recommends \
        build-essential \
        cmake \
        git \
        wget \
    # flask pinned for reproducibility; llama-cpp-python builds CPU-only by
    # default, which matches Hugging Face's CPU infrastructure.
    && pip install --no-cache-dir flask==3.0.3 llama-cpp-python==0.2.80 \
    && apt-get purge -y --auto-remove build-essential cmake \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /app
COPY . /app

# Do not run as root; port 7860 is unprivileged, so a plain user can bind it.
RUN useradd --system --uid 10001 --home-dir /app appuser \
    && chown -R appuser:appuser /app
USER appuser

# 7860 is the Hugging Face Spaces default port (documentation only; the port
# still has to be published/routed by the platform).
EXPOSE 7860

# Exec form: python is PID 1 and receives SIGTERM directly on container stop.
CMD ["python", "app.py"]