# NOTE(review): the lines that were here were Hugging Face Spaces file-viewer
# residue (status banners "Runtime error", a commit-hash row, and a
# line-number gutter) — not Python. Replaced with this comment so the module
# parses. The "Runtime error" itself was the missing `numpy` import below.
import csv
import json
import tempfile

import gradio as gr
import matplotlib.pyplot as plt
import numpy as np  # was missing: simulate_population/app use np.* -> NameError
from huggingface_hub import InferenceClient
# Global simulation parameters, shared by simulate_population() and app().
r = 0.3 # intrinsic growth rate of the logistic model
K = 1000 # carrying capacity (upper population limit)
T = 20 # total simulated time span (arbitrary units)
# Logistic model: one growth increment.
def logistic_growth(N, r, K):
    """Return the logistic growth term r*N*(1 - N/K) for population N."""
    saturation = 1 - N / K
    return r * N * saturation
# Run the discrete logistic recurrence for several independent simulations.
def simulate_population(t_values, initial_population, num_simulations,
                        growth_rate=None, capacity=None):
    """Iterate N[t] = N[t-1] + r*N[t-1]*(1 - N[t-1]/K) over t_values.

    Parameters
    ----------
    t_values : sequence
        Time grid; only its length is used (one recurrence step per entry).
    initial_population : number
        Starting population, broadcast to every simulation column.
    num_simulations : int
        Number of independent runs (all identical — the model is deterministic).
    growth_rate, capacity : float, optional
        Override the module-level constants ``r`` and ``K``; defaults preserve
        the original behavior of reading the globals.

    Returns
    -------
    numpy.ndarray of shape (len(t_values), num_simulations).
    """
    # Fall back to the module-level constants, as the original code did.
    growth_rate = r if growth_rate is None else growth_rate
    capacity = K if capacity is None else capacity

    steps = len(t_values)
    population = np.zeros((steps, num_simulations))
    population[0] = initial_population
    for t in range(1, steps):
        prev = population[t - 1]
        # Vectorized over simulations (same arithmetic, column by column, as
        # the original per-simulation loop — which was redundant anyway since
        # every column evolves identically).
        # NOTE(review): the update uses a fixed unit step and ignores the
        # actual spacing of t_values — confirm this is the intended model.
        population[t] = prev + growth_rate * prev * (1 - prev / capacity)
    return population
# Gradio callback: run the simulations and render one subplot per run.
def app(num_simulations, initial_population):
    """Simulate logistic growth and plot each run in its own stacked subplot.

    Returns ``[png_path, results_as_nested_list]``: the figure is saved to a
    temporary PNG whose path is handed to Gradio, and the raw trajectories
    are returned as a plain list.
    """
    n_runs = int(num_simulations)
    start_pop = int(initial_population)
    times = np.linspace(0, T, 100)
    runs = simulate_population(times, start_pop, n_runs)

    # One stacked subplot per simulation, sharing the time axis.
    fig, axes = plt.subplots(nrows=n_runs, ncols=1, figsize=(10, 8), sharex=True)
    if n_runs == 1:
        axes = [axes]  # subplots() returns a bare Axes when nrows == 1

    for idx, axis in enumerate(axes):
        axis.plot(times, runs[:, idx], label=f'Simulaci贸n {idx+1}', alpha=0.7)
        axis.set_title(f'Simulaci贸n {idx+1}')
        axis.set_xlabel('Tiempo')
        axis.set_ylabel('Poblaci贸n')
        axis.legend()
        axis.grid(True)
        axis.set_ylim(0, 1200)  # fixed y-range so runs are visually comparable

    # Save to a temp PNG; Gradio serves it by filename, so the file must
    # outlive this call (hence delete=False).
    png = tempfile.NamedTemporaryFile(delete=False, suffix='.png')
    png.close()
    fig.savefig(png.name)
    plt.close(fig)
    return [png.name, runs.tolist()]
def buscar_en_csv_y_generar_json(archivo_csv, valor_busqueda):
    """Search a CSV file for rows containing a substring; return them as JSON.

    Each row is matched by joining its cells with commas and performing a
    plain substring test. Returns a JSON array of the matching rows, or a
    JSON object with a "mensaje" key when nothing matches.
    """
    with open(archivo_csv, mode='r', encoding='utf-8') as file:
        coincidencias = [
            fila for fila in csv.reader(file)
            if valor_busqueda in ','.join(fila)
        ]
    if not coincidencias:
        return json.dumps({"mensaje": "No se encontraron coincidencias."}, indent=4, ensure_ascii=False)
    return json.dumps(coincidencias, indent=4, ensure_ascii=False)
"""
For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
"""
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
def respond(
    message,
    history: list[tuple[str, str]],
    system_message,
    max_tokens,
    temperature,
    top_p,
):
    """Stream a chat completion for `message`, yielding the growing reply.

    Builds an OpenAI-style message list from the system prompt and the
    (user, assistant) history pairs, then streams tokens from the shared
    `client`, yielding the accumulated response after each chunk.
    """
    messages = [{"role": "system", "content": system_message}]
    for user_turn, assistant_turn in history:
        if user_turn:
            messages.append({"role": "user", "content": user_turn})
        if assistant_turn:
            messages.append({"role": "assistant", "content": assistant_turn})
    messages.append({"role": "user", "content": message})

    response = ""
    # FIX: the loop variable used to shadow the `message` parameter; renamed
    # to `chunk` so the original user message is not clobbered mid-stream.
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        token = chunk.choices[0].delta.content
        # FIX: streamed deltas can carry content=None (e.g. role-only or
        # final chunks); `response += None` raised TypeError.
        if token:
            response += token
        yield response
"""
For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
"""
from PIL import Image
# Ruta a la imagen en tu disco
image_path = "images/grafica.png"
def load_image():
    """Open and return the pre-rendered chart at `image_path` as a PIL image."""
    return Image.open(image_path)
# CSS override pinning the height of a gradio component by auto-generated id.
# NOTE(review): this variable is never passed to gr.Blocks(css=...) below.
css = "#component-2 {height: 350px}"
def search(term):
    """Return JSON-encoded rows of the bundled projects CSV that contain *term*."""
    data_file = "proyectos_empresas_full.csv"
    return buscar_en_csv_y_generar_json(data_file, term)
# UI layout. NOTE(review): `as app` rebinds the module-level name `app`,
# shadowing the plotting function app() defined above — confirm that function
# is intentionally unused by the interface.
with gr.Blocks(title="SPAIN WIND ENERGY LOBBY") as app:
    #with gr.Blocks(theme='gradio/soft') as demo:
    #with gr.Blocks(title="Sophia, Torah Codes") as app:
    #with gr.Row():
    # The chat tab is currently disabled: the block below is a no-op string
    # literal, not live code.
    """
    gr.ChatInterface(
    respond,
    additional_inputs=[
    #gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
    #gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
    #gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
    #gr.Slider(
    # minimum=0.1,
    # maximum=1.0,
    # value=0.95,
    # step=0.05,
    # label="Top-p (nucleus sampling)",
    #),
    ],
    )
    """
    # with gr.Row():
    # Row 1: static chart image, rendered by embedding a gr.Interface.
    with gr.Row():
        from PIL import Image  # NOTE(review): redundant — already imported above
        gr.Interface(
            fn=load_image,  # function that loads and returns the image
            inputs=[],  # no user inputs
            outputs="image",  # output is an image
            title="",  # app title
            description=""  # description
        )
        #gr.Plot(label="MW por promotor")
        #gr.Plot(label="Ubicaci贸n por promotor")
        #gr.Plot(label="Potencia promotor por ubicaci煤n")
    # Row 2: free-text search box plus its trigger button.
    with gr.Row():
        to_convert = gr.Textbox(value="Forestalia",label="Search",scale=4)
        search_els = gr.Button("Search",scale=1)
    # Row 3: JSON viewer for the search results.
    with gr.Row():
        #els_results = gr.JSON(label="Results")
        results = gr.JSON()
    # Wire the button to the CSV search; output lands in the JSON component.
    search_els.click(
        search,
        inputs=[to_convert],
        outputs= results
    )
if __name__ == "__main__":
    app.launch()
# NOTE(review): removed a stray trailing "|" left by the webpage scrape.