Commit · b16ca60
1 Parent(s): fe819e8
Update Dockerfile
Dockerfile CHANGED (+14 -9)
@@ -1,5 +1,7 @@
+# Use python:3.9-slim as the base image
 FROM python:3.9-slim
 
+# Set the working directory inside the container
 WORKDIR /code
 
 # Install system dependencies
@@ -7,34 +9,37 @@ RUN apt-get update && apt-get install -y \
     build-essential \
     && rm -rf /var/lib/apt/lists/*
 
-# Copy requirements first for better caching
+# Copy requirements.txt first for better caching
 COPY requirements.txt .
 RUN pip install --no-cache-dir -r requirements.txt
 
-# Install sentencepiece
+# Install sentencepiece for tokenization
 RUN pip install sentencepiece
 
-# Create
+# Create and set permissions for the NLTK data directory
 RUN mkdir -p /code/nltk_data && chmod -R 777 /code/nltk_data
-
-# Set the NLTK_DATA environment variable to the created directory
 ENV NLTK_DATA=/code/nltk_data
 
 # Download punkt data for NLTK
 RUN python -c "import nltk; nltk.download('punkt')"
 
-#
+# Create and set permissions for the Transformers cache directory
 RUN mkdir -p /code/transformers_cache && chmod -R 777 /code/transformers_cache
 ENV TRANSFORMERS_CACHE=/code/transformers_cache
+ENV HF_HOME=/code/transformers_cache # Adding HF_HOME for Hugging Face cache
 
-# Download
+# Download sentence-transformers model to avoid recreating it at runtime
 RUN python -c "from sentence_transformers import SentenceTransformer; SentenceTransformer('sentence-transformers/all-MiniLM-L6-v2')"
 RUN python -c "from sentence_transformers import CrossEncoder; CrossEncoder('cross-encoder/ms-marco-MiniLM-L-6-v2')"
 
-# Copy your code
+# Copy your application code into the container
 COPY . .
 
-#
+# Expose port 7860 for FastAPI
+EXPOSE 7860
+
+# Command to run FastAPI using Uvicorn
 CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
 
+# Set environment variable for cache location
 ENV TRANSFORMERS_CACHE=/code/transformers_cache
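For context on why the image warms up these assets at build time: with NLTK_DATA, TRANSFORMERS_CACHE and HF_HOME pointing at the directories populated during the build, the application started by the CMD can construct the same models at startup without downloading anything. A minimal sketch of what the app side might look like is below; the app.py module name, the /health route, and its body are assumptions for illustration, since the actual application code is not part of this commit.

# app.py - hypothetical sketch; not taken from this repository.
# Because NLTK_DATA, TRANSFORMERS_CACHE and HF_HOME point at the directories
# filled during the image build, these constructors load from the local cache
# instead of fetching models over the network at container start.
from fastapi import FastAPI
from sentence_transformers import SentenceTransformer, CrossEncoder
import nltk

app = FastAPI()

# Same model names as in the Dockerfile's warm-up RUN steps.
embedder = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")
reranker = CrossEncoder("cross-encoder/ms-marco-MiniLM-L-6-v2")

@app.get("/health")
def health():
    # Raises LookupError if the pre-downloaded punkt data is not visible via NLTK_DATA.
    nltk.data.find("tokenizers/punkt")
    return {"status": "ok"}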