import gradio as gr
import os
import google.generativeai as genai

# Configure the Gemini client with the API key stored in the environment
genai.configure(api_key=os.environ["geminiapikey"])
# Optional Hugging Face token (not used in this script)
read_key = os.environ.get('HF_TOKEN', None)
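
# Custom CSS for the Markdown output pane (dark background, white text and border)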
custom_css = """
#md {
    height: 400px;
    font-size: 30px;
    background: #202020;
    padding: 20px;
    color: white;
    border: 1px solid white;
}
"""
def predict(prompt):
    # Create the model
    generation_config = {
        "temperature": 1,
        "top_p": 0.95,
        "top_k": 40,
        "max_output_tokens": 2048,
        "response_mime_type": "text/plain",
    }
    model = genai.GenerativeModel(
        model_name="gemini-1.5-pro",
        generation_config=generation_config,
    )
    # Start a fresh chat session (no prior history) and send the prompt
    chat_session = model.start_chat(history=[])
    response = chat_session.send_message(prompt)
    return response.text
# Create the Gradio interface: answer pane, prompt box, and send button
with gr.Blocks(css=custom_css) as demo:
    with gr.Row():
        details_output = gr.Markdown(label="answer", elem_id="md")
        # details_output = gr.Textbox(label="output", value=f"\n\n\n\n")
    with gr.Row():
        ort_input = gr.Textbox(label="prompt", placeholder="ask anything...")
    with gr.Row():
        button = gr.Button("Send")

    # Connect the button to the function
    button.click(fn=predict, inputs=ort_input, outputs=details_output)

# Launch the Gradio application
demo.launch()
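
# To run locally (assuming this file is saved as app.py, gradio and
# google-generativeai are installed, and geminiapikey holds a valid Gemini API key):
#   export geminiapikey=...
#   python app.py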