Aleksey22 committed on
Commit
724cc35
1 Parent(s): e910817

Add application file

Files changed (3)
  1. Dockerfile +16 -0
  2. app.py +28 -0
  3. requirements.txt +5 -0
Dockerfile ADDED
@@ -0,0 +1,16 @@
+ # read the doc: https://huggingface.co/docs/hub/spaces-sdks-docker
+ # you will also find guides on how best to write your Dockerfile
+
+ FROM python:3.9
+
+ RUN useradd -m -u 1000 user
+ USER user
+ ENV PATH="/home/user/.local/bin:$PATH"
+
+ WORKDIR /app
+
+ COPY --chown=user ./requirements.txt requirements.txt
+ RUN pip install --no-cache-dir --upgrade -r requirements.txt
+
+ COPY --chown=user . /app
+ CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
app.py ADDED
@@ -0,0 +1,28 @@
+ from transformers import AutoTokenizer, AutoModelForCausalLM
+ from fastapi import FastAPI
+ from pydantic import BaseModel
+ import uvicorn
+
+ # Load the model and tokenizer
+ model_name = "Bin12345/AutoCoder_QW_7B"
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
+ model = AutoModelForCausalLM.from_pretrained(model_name)
+
+ # Define the request body schema
+ class Message(BaseModel):
+     content: str
+
+ # Create the FastAPI application
+ app = FastAPI()
+
+ # Define the text generation endpoint
+ @app.post("/generate/")
+ async def generate_text(message: Message):
+     inputs = tokenizer.encode(message.content, return_tensors='pt')
+     outputs = model.generate(inputs, max_length=50, num_return_sequences=1)
+     generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
+     return {"generated_text": generated_text}
+
+ # Run the application with uvicorn
+ if __name__ == "__main__":
+     uvicorn.run(app, host="0.0.0.0", port=7860)
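Note (not part of this commit): once the container is running, the /generate/ endpoint added above can be exercised with a small client such as the sketch below. The base URL and prompt are placeholders, and the `requests` library is assumed to be installed.

# Minimal client sketch for the /generate/ endpoint (illustrative only).
import requests

SPACE_URL = "http://localhost:7860"  # placeholder; replace with the deployed Space URL

response = requests.post(
    f"{SPACE_URL}/generate/",
    json={"content": "def fibonacci(n):"},  # matches the Message model's `content` field
    timeout=300,  # generation with a 7B model can take a while, especially on CPU
)
response.raise_for_status()
print(response.json()["generated_text"])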
requirements.txt ADDED
@@ -0,0 +1,5 @@
+ fastapi
+ uvicorn[standard]
+ transformers
+ torch
+ pydantic