EveSa committed on
Commit
da48c72
·
unverified ·
2 Parent(s): 58d4131 8dba466

Merge pull request #3 from EveSa/Eve

Browse files
Files changed (4) hide show
  1. Dockerfile +5 -1
  2. requirements.txt +0 -1
  3. src/api.py +10 -4
  4. src/model.py +1 -0
Dockerfile CHANGED
@@ -8,4 +8,8 @@ RUN pip install --no-cache-dir --upgrade -r requirements.txt
8
 
9
  COPY . .
10
 
11
- CMD ["uvicorn", "api:app", "--host", "0.0.0.0", "--port", "7860"]
 
 
 
 
 
8
 
9
  COPY . .
10
 
11
+ EXPOSE 3001
12
+
13
+ #CMD ["uvicorn", "--app-dir", "./src", "api:app", "--host", "127.0.0.1", "--port", "8888"]
14
+
15
+ CMD python3 -m uvicorn --app-dir ./src api:app --host 0.0.0.0 --port 3001
requirements.txt CHANGED
@@ -6,7 +6,6 @@ idna==3.4
6
  Jinja2==3.1.2
7
  joblib==1.2.0
8
  MarkupSafe==2.1.2
9
- nltk==3.8.1
10
  numpy==1.24.2
11
  nvidia-cublas-cu11==11.10.3.66
12
  nvidia-cuda-nvrtc-cu11==11.7.99
 
6
  Jinja2==3.1.2
7
  joblib==1.2.0
8
  MarkupSafe==2.1.2
 
9
  numpy==1.24.2
10
  nvidia-cublas-cu11==11.10.3.66
11
  nvidia-cuda-nvrtc-cu11==11.7.99
src/api.py CHANGED
@@ -10,6 +10,8 @@ from inference import inferenceAPI
10
  # appel de la fonction inference, adaptee pour une entree txt
11
  def summarize(text: str):
12
  return " ".join(inferenceAPI(text))
 
 
13
  # ----------------------------------------------------------------------------------
14
 
15
 
@@ -20,23 +22,27 @@ app = FastAPI()
20
  templates = Jinja2Templates(directory="templates")
21
  app.mount("/templates", StaticFiles(directory="templates"), name="templates")
22
 
 
23
  @app.get("/")
24
  async def index(request: Request):
25
  return templates.TemplateResponse("index.html.jinja", {"request": request})
26
 
 
27
  # retourner le texte, les predictions et message d'erreur si formulaire envoye vide
28
  @app.post("/")
29
  async def prediction(request: Request, text: str = Form(None)):
30
- if not text :
31
  error = "Merci de saisir votre texte."
32
  return templates.TemplateResponse(
33
- "index.html.jinja", {"request": request, "text": error}
34
- )
35
- else :
36
  summary = summarize(text)
37
  return templates.TemplateResponse(
38
  "index.html.jinja", {"request": request, "text": text, "summary": summary}
39
  )
 
 
40
  # ------------------------------------------------------------------------------------
41
 
42
 
 
10
  # appel de la fonction inference, adaptee pour une entree txt
11
  def summarize(text: str):
12
  return " ".join(inferenceAPI(text))
13
+
14
+
15
  # ----------------------------------------------------------------------------------
16
 
17
 
 
22
  templates = Jinja2Templates(directory="templates")
23
  app.mount("/templates", StaticFiles(directory="templates"), name="templates")
24
 
25
+
26
  @app.get("/")
27
  async def index(request: Request):
28
  return templates.TemplateResponse("index.html.jinja", {"request": request})
29
 
30
+
31
  # retourner le texte, les predictions et message d'erreur si formulaire envoye vide
32
  @app.post("/")
33
  async def prediction(request: Request, text: str = Form(None)):
34
+ if not text:
35
  error = "Merci de saisir votre texte."
36
  return templates.TemplateResponse(
37
+ "index.html.jinja", {"request": request, "text": error}
38
+ )
39
+ else:
40
  summary = summarize(text)
41
  return templates.TemplateResponse(
42
  "index.html.jinja", {"request": request, "text": text, "summary": summary}
43
  )
44
+
45
+
46
  # ------------------------------------------------------------------------------------
47
 
48
 
src/model.py CHANGED
@@ -8,6 +8,7 @@ import torch
8
 
9
  logging.basicConfig(level=logging.DEBUG)
10
 
 
11
  class Encoder(torch.nn.Module):
12
  def __init__(
13
  self,
 
8
 
9
  logging.basicConfig(level=logging.DEBUG)
10
 
11
+
12
  class Encoder(torch.nn.Module):
13
  def __init__(
14
  self,