artificialguybr's picture
Update app.py
48b792e
raw
history blame
7.35 kB
import gradio as gr
import openai
import json
from graphviz import Digraph
from PIL import Image
import io
import requests
from bs4 import BeautifulSoup
# Function to generate a knowledge graph from text
def generate_knowledge_graph_from_text(api_key, user_input):
# Ensure the API key and user input are provided
if not api_key or not user_input:
raise ValueError("Please provide both the OpenAI API Key and User Input")
# Process user input
response_data = process_user_input(api_key, user_input)
return generate_knowledge_graph(response_data)
# Function to generate a knowledge graph from a URL
def generate_knowledge_graph_from_url(api_key, url):
# Ensure the API key and URL are provided
if not api_key or not url:
raise ValueError("Please provide both the OpenAI API Key and a URL")
# Scrape text from the provided URL
text = scrape_text_from_url(url)
# Process the scraped text
response_data = process_user_input(api_key, text)
return generate_knowledge_graph(response_data)
# Function to process user input and call OpenAI API
def process_user_input(api_key, user_input):
openai.api_key = api_key
# Call the OpenAI API
completion = openai.ChatCompletion.create(
model="gpt-3.5-turbo-16k",
messages=[
{
"role": "user",
"content": f"Help me understand following by describing as a detailed knowledge graph: {user_input}",
}
],
functions=[
{
"name": "knowledge_graph",
"description": "Generate a knowledge graph with entities and relationships. Use the colors to help differentiate between different node or edge types/categories. Always provide light pastel colors that work well with black font.",
"parameters": {
"type": "object",
"properties": {
"metadata": {
"type": "object",
"properties": {
"createdDate": {"type": "string"},
"lastUpdated": {"type": "string"},
"description": {"type": "string"},
},
},
"nodes": {
"type": "array",
"items": {
"type": "object",
"properties": {
"id": {"type": "string"},
"label": {"type": "string"},
"type": {"type": "string"},
"color": {"type": "string"}, # Added color property
"properties": {
"type": "object",
"description": "Additional attributes for the node",
},
},
"required": [
"id",
"label",
"type",
"color",
], # Added color to required
},
},
"edges": {
"type": "array",
"items": {
"type": "object",
"properties": {
"from": {"type": "string"},
"to": {"type": "string"},
"relationship": {"type": "string"},
"direction": {"type": "string"},
"color": {"type": "string"}, # Added color property
"properties": {
"type": "object",
"description": "Additional attributes for the edge",
},
},
"required": [
"from",
"to",
"relationship",
"color",
], # Added color to required
},
},
},
"required": ["nodes", "edges"],
},
}
],
function_call={"name": "knowledge_graph"},
)
response_data = completion.choices[0]["message"]["function_call"]["arguments"]
return response_data
# Function to generate a knowledge graph from response data
def generate_knowledge_graph(response_data):
# Visualizar o conhecimento usando Graphviz
dot = Digraph(comment="Knowledge Graph", format='png')
dot.attr(dpi='300')
dot.attr(bgcolor='white') # Set background color to white
# Estilizar os nós
dot.attr('node', shape='box', style='filled', fillcolor='lightblue', fontcolor='black')
for node in response_data.get("nodes", []):
dot.node(node["id"], f"{node['label']} ({node['type']})", color=node.get("color", "lightblue"))
# Estilizar as arestas
dot.attr('edge', color='black', fontcolor='black')
for edge in response_data.get("edges", []):
dot.edge(edge["from"], edge["to"], label=edge["relationship"], color=edge.get("color", "black"))
# Renderizar para o formato PNG
image_data = dot.pipe()
image = Image.open(io.BytesIO(image_data))
return image
# Function to scrape text from a website
def scrape_text_from_url(url):
response = requests.get(url)
if response.status_code != 200:
return "Error: Could not retrieve content from URL."
soup = BeautifulSoup(response.text, "html.parser")
paragraphs = soup.find_all("p")
text = " ".join([p.get_text() for p in paragraphs])
return text
# Define a title and description for the Gradio interface using Markdown
title_and_description = """
# Instagraph - Knowledge Graph Generator
**Created by [ArtificialGuyBR](https://twitter.com/ArtificialGuyBR)**
This interactive knowledge graph generator allows you to input either text or a URL.
If you provide text, it will generate a knowledge graph based on the text you provide.
If you provide a URL, it will scrape the content from the webpage and generate a knowledge graph from that.
To get started, enter your OpenAI API Key and either your text or a URL.
"""
# Create the Gradio interface with queueing enabled and concurrency_count set to 10
iface = gr.Interface(
fn=generate_knowledge_graph_from_text,
inputs=[
gr.inputs.Textbox(label="OpenAI API Key", type="password"),
gr.inputs.Textbox(label="Text or URL", type="text"),
],
outputs=gr.outputs.Image(type="pil", label="Generated Knowledge Graph"),
live=False,
title=title_and_description,
)
# Enable queueing system for multiple users
iface.queue(concurrency_count=10)
print("Iniciando a interface Gradio...")
iface.launch()