sachin committed · Commit bfbe1ae · 1 Parent(s): 7529a73
download-modesl
Files changed:
- Dockerfile +1 -5
- Dockerfile.app +1 -0
- Dockerfile.models +1 -1
- download_models.py +1 -1
Dockerfile
CHANGED
@@ -1,11 +1,6 @@
 FROM slabstech/dhwani-server-base
 WORKDIR /app

-COPY dhwani_config.json .
-
-# Create a directory for pre-downloaded models
-
-
 RUN mkdir -p /app/models

 # Define build argument for HF_TOKEN
@@ -18,6 +13,7 @@ ENV HF_TOKEN=$HF_TOKEN_DOCKER
 COPY download_models.py .
 RUN python download_models.py

+COPY dhwani_config.json .
 COPY . .

 # Set up user
Dockerfile.app
CHANGED
@@ -2,6 +2,7 @@
 FROM slabstech/dhwani-model-server:latest
 WORKDIR /app

+COPY dhwani_config.json .
 # Copy application code
 COPY . .

Dockerfile.models
CHANGED
@@ -1,5 +1,5 @@
 # Base image with CUDA support
-FROM
+FROM slabstech/dhwani-model-server:latest


 # Create a directory for pre-downloaded models
download_models.py
CHANGED
@@ -12,7 +12,7 @@ if not hf_token:
 models = {
     #'llm_model': ('google/gemma-3-4b-it', Gemma3ForConditionalGeneration, AutoProcessor),
     #'tts_model': ('ai4bharat/IndicF5', AutoModel, None),
-    'asr_model': ('ai4bharat/indic-conformer-600m-multilingual', AutoModel, None),
+    #'asr_model': ('ai4bharat/indic-conformer-600m-multilingual', AutoModel, None),
     'trans_en_indic': ('ai4bharat/indictrans2-en-indic-dist-200M', AutoModelForSeq2SeqLM, AutoTokenizer),
     'trans_indic_en': ('ai4bharat/indictrans2-indic-en-dist-200M', AutoModelForSeq2SeqLM, AutoTokenizer),
     'trans_indic_indic': ('ai4bharat/indictrans2-indic-indic-dist-320M', AutoModelForSeq2SeqLM, AutoTokenizer),
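
For context, the models mapping above pairs each Hugging Face repo id with the class used to fetch it and an optional tokenizer class. The download loop itself is not part of this diff, so the sketch below is only an assumption about how download_models.py might consume the dict during `RUN python download_models.py`, authenticating with the HF_TOKEN build argument; the loop body and the trust_remote_code flag are illustrative, not taken from the repository.

# Hypothetical sketch only: the actual body of download_models.py is not shown
# in this diff, just its models dict. Assumed behaviour: pre-download each
# model (and tokenizer, when one is listed) so the weights land in the image's
# Hugging Face cache at build time.
import os

from transformers import AutoModel, AutoModelForSeq2SeqLM, AutoTokenizer

hf_token = os.getenv("HF_TOKEN")
if not hf_token:
    raise ValueError("HF_TOKEN is not set")

models = {
    # ASR entry disabled by this commit; translation models are still fetched.
    'trans_en_indic': ('ai4bharat/indictrans2-en-indic-dist-200M', AutoModelForSeq2SeqLM, AutoTokenizer),
    'trans_indic_en': ('ai4bharat/indictrans2-indic-en-dist-200M', AutoModelForSeq2SeqLM, AutoTokenizer),
    'trans_indic_indic': ('ai4bharat/indictrans2-indic-indic-dist-320M', AutoModelForSeq2SeqLM, AutoTokenizer),
}

for name, (repo_id, model_cls, tokenizer_cls) in models.items():
    print(f"Downloading {name}: {repo_id}")
    # from_pretrained populates the local cache so containers built from this
    # image can load the model without re-downloading; trust_remote_code is an
    # assumption, since the IndicTrans2 checkpoints ship custom modelling code.
    model_cls.from_pretrained(repo_id, token=hf_token, trust_remote_code=True)
    if tokenizer_cls is not None:
        tokenizer_cls.from_pretrained(repo_id, token=hf_token, trust_remote_code=True)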