graphrag
- app.py +58 -0
- graphrag.py +93 -0
- requirements.txt +14 -1
app.py
CHANGED
@@ -1,6 +1,7 @@
 import gradio as gr
 from rag import rbc_product
 from tool import rival_product
+from graphrag import reasoning

 with gr.Blocks() as demo:
     with gr.Tab("RAG"):
@@ -60,6 +61,63 @@ https://kevinwkc.github.io/davinci/
         btn_recommend=gr.Button("Recommend")
         btn_recommend.click(fn=rival_product, inputs=in_verbatim, outputs=out_product)

+    with gr.Tab("graphrag"):
+        gr.Markdown("""
+        Reasoning from context, answering the question
+        """)
+        in_verbatim = gr.Textbox(label="Context")
+        in_question = gr.Textbox(label="Question")
+        out_product = gr.Textbox(label="Answer")
+
+        gr.Examples(
+            [
+                [
+                    """
+A business model is not merely a static description but a dynamic ecosystem defined by five interdependent pillars:
+
+Value Creation (What you sell): The core offering must solve a critical pain point or unlock untapped demand. This is the foundation of your value proposition—quantifiable (e.g., cost efficiency) or qualitative (e.g., exceptional user experience)—that differentiates you in the market.
+
+Delivery Infrastructure (How you deliver): Channels and partnerships must align to ensure seamless access to your offering. For instance, a SaaS company might leverage cloud platforms for instant scalability, while a luxury brand prioritizes exclusive retail partnerships.
+
+Customer Lifecycle Dynamics:
+
+Acquisition: How do users discover you? Channels like organic search (SEO), targeted ads, or influencer partnerships must map to your customer segments’ behaviors.
+
+Activation: Do first-time users experience immediate value? A fitness app, for example, might use onboarding tutorials to convert sign-ups into active users.
+
+Retention: Is engagement sustained? Metrics like churn rate and CLV reveal whether your model fosters loyalty through features like personalized content or subscription perks.
+
+Referral: Do users become advocates? Incentivize sharing through referral programs or viral loops (e.g., Dropbox’s storage rewards).
+
+Revenue Architecture (How you monetize): Align pricing models (subscriptions, freemium tiers) with customer willingness-to-pay. For instance, a niche market might sustain premium pricing, while a mass-market product prioritizes volume.
+
+Cost Symmetry: Every activity—from R&D to customer support—must balance against revenue streams. A low-cost airline, for example, optimizes for operational efficiency to maintain profitability.
+
+Strategic Imperatives for Modern Business Models
+
+Systemic Integration: Ohmae’s “3C’s” (Customer, Competitor, Company) remind us that acquisition channels and value propositions must adapt to shifting market realities. For instance, a retailer might pivot from brick-and-mortar to hybrid models post-pandemic.
+
+Data-Driven Iteration: Use AARRR metrics to identify leaks in the funnel. If activation rates lag, refine onboarding; if referrals stagnate, enhance shareability.
+
+Scalability through Partnerships: Key partners (e.g., tech vendors, logistics providers) can reduce overhead while expanding reach—critical for transitioning from niche to mass markets.
+
+By framing each component as a strategic variable rather than a fixed element, businesses can continuously adapt to disruptions—a necessity in Ohmae’s vision of fluid, customer-first strategy.
+"""]
+            ],
+            [in_verbatim]
+        )
+
+        gr.Examples(
+            [
+                [
+                    """Create marketing campaign that can improve customer acquisition, activation, retention and referral for this persona:
+Low APR and great customer service. I would highly recommend if you’re looking for a great credit card company and looking to rebuild your credit. I have had my credit limit increased annually and the annual fee is very low.
+"""]
+            ],
+            [in_question]
+        )
+        btn_recommend = gr.Button("Reasoning")
+        btn_recommend.click(fn=reasoning, inputs=[in_verbatim, in_question], outputs=out_product)


 demo.launch(allowed_paths=["./xgb","./ts"])
graphrag.py
ADDED
@@ -0,0 +1,93 @@
+import os
+
+"""
+from langchain_openai import ChatOpenAI
+llm = ChatOpenAI(temperature=0, model_name="gpt-4-turbo")
+
+from langchain_ollama.llms import OllamaLLM
+llm = OllamaLLM(temperature=0, model="llama3.2")
+"""
+
+from typing import List, Optional
+
+from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI
+
+# SEE: https://huggingface.co/docs/hub/security-tokens
+# We just need a token with read permissions for this demo
+HF_TOKEN: Optional[str] = os.getenv("HF_TOKEN")
+
+# NOTE: LLMGraphTransformer and GraphQAChain below expect a LangChain-compatible LLM;
+# if this llama_index wrapper is not accepted, swap in one of the commented-out options above.
+llm = HuggingFaceInferenceAPI(
+    temperature=0,
+    model_name="meta-llama/Llama-3.2-1B",
+    token=HF_TOKEN,
+)
+
+import networkx as nx
+import matplotlib.pyplot as plt
+import pandas as pd
+import numpy as np
+
+from langchain_experimental.graph_transformers import LLMGraphTransformer
+from langchain.chains import GraphQAChain
+from langchain_core.documents import Document
+from langchain_community.graphs.networkx_graph import NetworkxEntityGraph
+
+
+customer = "Low APR and great customer service. I would highly recommend if you’re looking for a great credit card company and looking to rebuild your credit. I have had my credit limit increased annually and the annual fee is very low."
+
+text = """
+A business model is a combination of things: it's what you sell, how you deliver it, how you acquire customers, and how you make money from them.
+
+Acquisition: how do users become aware of you?
+Activation: Do drive-by visitors subscribe and use?
+Retention: does a one-time user become engaged?
+Referral: Do users tell others?
+Revenue: How do you make money?
+"""
+question = f"Create marketing campaign that can improve customer acquisition, activation, retention and referral for this persona: {customer}"
+
+
+def knowledge_graph(text):
+    # Extract entities and relationships from the text with an LLM, then load them into a NetworkX entity graph.
+    documents = [Document(page_content=text)]
+    llm_transformer_filtered = LLMGraphTransformer(
+        llm=llm,
+        # allowed_nodes=["Need", "Issue", "Product"],
+        # allowed_relationships=["WANT", "WITH", "USING", "RECOMMEND"],
+    )
+    graph_documents_filtered = llm_transformer_filtered.convert_to_graph_documents(documents)
+    graph = NetworkxEntityGraph()
+
+    for node in graph_documents_filtered[0].nodes:
+        graph.add_node(node.id)
+
+    for edge in graph_documents_filtered[0].relationships:
+        graph._graph.add_edge(
+            edge.source.id,
+            edge.target.id,
+            relation=edge.type
+        )
+
+    return graph, graph_documents_filtered
+
+
+def reasoning(text, question):
+    try:
+        print("Generating knowledge graph...")
+        graph, graph_documents_filtered = knowledge_graph(text)
+
+        print("Building GraphQAChain...")
+        graph_rag = GraphQAChain.from_llm(
+            llm=llm,
+            graph=graph,
+            verbose=True
+        )
+
+        print("Answering through GraphQAChain...")
+        answer = graph_rag.invoke(question)
+        return answer
+
+    except Exception as e:
+        print(f"An error occurred in reasoning: {str(e)}")
+        import traceback
+        traceback.print_exc()
+        return str(e)
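For a quick local check of the new module, a minimal sketch (not part of this commit) that runs reasoning outside Gradio and renders the extracted entity graph with the networkx/matplotlib dependencies added to requirements.txt; it assumes HF_TOKEN is set in the environment, and the output filename is only an illustration:

# Standalone sketch: build the graph from the sample text, draw it, then answer the sample question.
import matplotlib
matplotlib.use("Agg")  # headless rendering; an assumption, not required by graphrag.py
import matplotlib.pyplot as plt
import networkx as nx

from graphrag import knowledge_graph, reasoning, text, question

if __name__ == "__main__":
    # Build the entity graph once so it can be inspected before querying.
    graph, graph_documents = knowledge_graph(text)
    nx.draw(graph._graph, with_labels=True, node_size=800, font_size=8)
    plt.savefig("knowledge_graph.png")  # hypothetical output path

    # GraphQAChain rebuilds and walks the same graph to answer the question.
    print(reasoning(text, question))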
requirements.txt
CHANGED
@@ -9,4 +9,17 @@ openai==1.61.0
 dspy
 llama-index
 faiss-cpu
-tavily-python
+tavily-python
+
+llama-index-llms-huggingface-api
+huggingface_hub[inference]
+
+networkx
+matplotlib
+langchain-experimental
+#langchain-groq
+langchain-community
+pandas
+#gradio-client
+pillow
+numpy