ThomasBlumet committed
Commit 437cdee · 1 Parent(s): f0da92e

add app file and the docker image

Files changed (3)
  1. Dockerfile +25 -0
  2. app.py +18 -0
  3. requirements.txt +4 -0
Dockerfile ADDED
@@ -0,0 +1,25 @@
+ # For more information, please refer to https://aka.ms/vscode-docker-python
+ FROM python:3.9-slim
+
+ # Keeps Python from generating .pyc files in the container
+ ENV PYTHONDONTWRITEBYTECODE=1
+
+ # Turns off buffering for easier container logging
+ ENV PYTHONUNBUFFERED=1
+ # Where we'll copy the code
+ WORKDIR /code
+
+ # Install pip requirements
+ COPY requirements.txt /code/requirements.txt
+ RUN python -m pip install --no-cache-dir --upgrade -r /code/requirements.txt
+
+ # Copy the application code into the image
+ COPY . /code
+
+ # Creates a non-root user with an explicit UID and adds permission to access the /code folder
+ # For more info, please refer to https://aka.ms/vscode-docker-python-configure-containers
+ RUN adduser --uid 1000 --disabled-password --gecos "" appuser && chown -R appuser /code
+ USER appuser
+
+ # During debugging, this entry point will be overridden. For more information, please refer to https://aka.ms/vscode-docker-python-debug
+ CMD ["python", "app.py"]
app.py ADDED
@@ -0,0 +1,18 @@
+ from transformers import BartForConditionalGeneration, BartTokenizer
+ import gradio as gr
+
+ # Load the BART model and its tokenizer
+ model_name = "facebook/bart-large-cnn"
+ tokenizer = BartTokenizer.from_pretrained(model_name)
+ model = BartForConditionalGeneration.from_pretrained(model_name)
+
+ # Generate text from the user's prompt
+ def generate_text(prompt):
+     inputs = tokenizer(prompt, return_tensors="pt", truncation=True, max_length=512)
+     summary_ids = model.generate(inputs["input_ids"], max_length=150, min_length=40, length_penalty=2.0, num_beams=4, early_stopping=True)
+     return tokenizer.decode(summary_ids[0], skip_special_tokens=True)
+
+ # Create the Gradio text-in, text-out interface
+ interface = gr.Interface(fn=generate_text, inputs="text", outputs="text", title="TeLLMyStory", description="Enter your story idea and the model will generate the story based on it.")
+ # Launch the interface, listening on all interfaces at the port the Docker image exposes
+ interface.launch(server_name="0.0.0.0", server_port=7860)
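
As a quick sanity check, the generation call in app.py can be exercised without the Gradio UI. The snippet below is a minimal sketch using the same facebook/bart-large-cnn checkpoint and the same generation parameters as above; the prompt string is made up for illustration. Note that bart-large-cnn is a summarization checkpoint, so the output tends to be a condensed restatement of the prompt rather than a free continuation.

from transformers import BartForConditionalGeneration, BartTokenizer

tokenizer = BartTokenizer.from_pretrained("facebook/bart-large-cnn")
model = BartForConditionalGeneration.from_pretrained("facebook/bart-large-cnn")

# Hypothetical prompt, only for illustration
prompt = "A young lighthouse keeper discovers a message in a bottle that predicts the next storm."
inputs = tokenizer(prompt, return_tensors="pt", truncation=True, max_length=512)
output_ids = model.generate(inputs["input_ids"], max_length=150, min_length=40,
                            length_penalty=2.0, num_beams=4, early_stopping=True)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))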
requirements.txt ADDED
@@ -0,0 +1,4 @@
+ transformers
+ torch
+ gradio
+ huggingface_hub