VicGerardoPR committed on
Commit
c58b2b6
·
verified ·
1 Parent(s): 1a10b01

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -12
app.py CHANGED
@@ -1,24 +1,22 @@
1
  import streamlit as st
2
  from transformers import AutoModelForCausalLM, AutoTokenizer
3
  import torch
 
 
4
 
5
- # Cargar el modelo y el tokenizador
6
- @st.cache(allow_output_mutation=True)
7
- def load_model():
8
- model_name = "meta-llama/Meta-Llama-Guard-2-8B" # Reemplaza con el nombre del modelo que vas a usar
9
- use_auth_token = 'YOUR_HUGGING_FACE_TOKEN'
10
- tokenizer = AutoTokenizer.from_pretrained(model_name, use_auth_token=use_auth_token)
11
- model = AutoModelForCausalLM.from_pretrained(model_name, use_auth_token=use_auth_token)
12
- return tokenizer, model
13
 
14
- tokenizer, model = load_model()
 
 
15
 
16
  st.title("LLaMA Chatbot")
17
  st.subheader("Ask anything to the LLaMA model!")
18
 
19
  user_input = st.text_input("You: ")
20
  if user_input:
21
- inputs = tokenizer(user_input, return_tensors="pt")
22
- outputs = model.generate(inputs.input_ids, max_length=150, num_return_sequences=1)
23
- response = tokenizer.decode(outputs[0], skip_special_tokens=True)
24
  st.write(f"Chatbot: {response}")
 
1
  import streamlit as st
2
  from transformers import AutoModelForCausalLM, AutoTokenizer
3
  import torch
4
+ import requests
5
+ import os
6
 
7
+ # Obtener el token de los secretos
8
+ API_URL = "https://api-inference.huggingface.co/models/meta-llama/Meta-Llama-Guard-2-8B"
9
+ headers = {"Authorization": f"Bearer {os.getenv('YOUR_HUGGING_FACE_TOKEN')}"}
 
 
 
 
 
10
 
11
+ def query(payload):
12
+ response = requests.post(API_URL, headers=headers, json=payload)
13
+ return response.json()
14
 
15
  st.title("LLaMA Chatbot")
16
  st.subheader("Ask anything to the LLaMA model!")
17
 
18
  user_input = st.text_input("You: ")
19
  if user_input:
20
+ output = query({"inputs": user_input})
21
+ response = output.get("generated_text", "Sorry, I couldn't generate a response.")
 
22
  st.write(f"Chatbot: {response}")