|
|
|
import os
import traceback

# Alternative LLM backends (kept as a commented-out block). Uncomment one of
# these and drop the Groq configuration below to switch providers.
"""
# OpenAI
from langchain_openai import ChatOpenAI

llm = ChatOpenAI(temperature=0, model_name="gpt-4-turbo")

# Ollama (runs the model locally)
from langchain_ollama.llms import OllamaLLM

llm = OllamaLLM(temperature=0, model="llama3.2")

# Hugging Face Inference API (requires HF_TOKEN in the environment)
HF_TOKEN = os.environ["HF_TOKEN"]

from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI

llm = HuggingFaceInferenceAPI(temperature=0.2, model_name="meta-llama/Llama-3.2-1B")

# LiteLLM routing to a Hugging Face-hosted model
from llama_index.llms.litellm import LiteLLM

llm = LiteLLM("huggingface/meta-llama/Llama-3.2-1B")
"""
|
|
|
import networkx as nx
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
from langchain_groq import ChatGroq
from langchain_experimental.graph_transformers import LLMGraphTransformer
from langchain.chains import GraphQAChain
from langchain_core.documents import Document
from langchain_community.graphs.networkx_graph import NetworkxEntityGraph

GROQ_API_KEY = os.environ.get('GROQ_API_KEY')

# Active LLM: Groq-hosted Llama 3.1; temperature=0 keeps graph extraction deterministic
llm = ChatGroq(temperature=0, model_name='llama-3.1-8b-instant', groq_api_key=GROQ_API_KEY)
|
|
|
customer="Low APR and great customer service. I would highly recommend if you’re looking for a great credit card company and looking to rebuild your credit. I have had my credit limit increased annually and the annual fee is very low." |
|
|
|
text=""" |
|
A business model is a combination of things: it's what you sell, how you deliver it, how you acquire customers, and how you make money from them. |
|
|
|
Acquisition: how do users become aware of you? |
|
Activation: Do drive-by visitors subscribe and use? |
|
Retention: does a one-time user become engaged? |
|
Referral: Do users tell others? |
|
Revenue: How do you make money? |
|
""" |
|
question=f"Create marketing campaign that can improve customer acquisition, activation, retention and referral for this persona: {customer}" |
|
def knowledge_graph(text):
    """Build a NetworkX entity graph from raw text using an LLM graph transformer."""
    documents = [Document(page_content=text)]
    llm_transformer_filtered = LLMGraphTransformer(llm=llm)

    # Let the LLM extract nodes and relationships from the text
    graph_documents_filtered = llm_transformer_filtered.convert_to_graph_documents(documents)

    # Copy the extracted entities and relations into a NetworkxEntityGraph
    graph = NetworkxEntityGraph()
    for node in graph_documents_filtered[0].nodes:
        graph.add_node(node.id)

    for edge in graph_documents_filtered[0].relationships:
        graph._graph.add_edge(
            edge.source.id,
            edge.target.id,
            relation=edge.type,
        )

    return graph, graph_documents_filtered
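

# Optional helper (a sketch, not part of the original pipeline): visualize the
# extracted graph with the networkx/matplotlib imports above. The function name
# `draw_knowledge_graph` is illustrative.
def draw_knowledge_graph(graph_documents):
    g = nx.DiGraph()
    for node in graph_documents[0].nodes:
        g.add_node(node.id)
    for edge in graph_documents[0].relationships:
        g.add_edge(edge.source.id, edge.target.id, relation=edge.type)

    # Spring layout for readable node placement; label edges with their relation type
    pos = nx.spring_layout(g, seed=42)
    nx.draw(g, pos, with_labels=True, node_color="lightblue", font_size=8)
    nx.draw_networkx_edge_labels(
        g, pos, edge_labels=nx.get_edge_attributes(g, "relation"), font_size=7
    )
    plt.show()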
|
|
|
|
|
def reasoning(text, question):
    """Build a knowledge graph from `text` and answer `question` over it with GraphQAChain."""
    try:
        print("Generating knowledge graph...")
        graph, graph_documents_filtered = knowledge_graph(text)

        print("Building GraphQAChain...")
        graph_rag = GraphQAChain.from_llm(
            llm=llm,
            graph=graph,
            verbose=True,
        )

        print("Answering through GraphQAChain...")
        answer = graph_rag.invoke(question)
        return answer['result']

    except Exception as e:
        print(f"An error occurred in reasoning: {str(e)}")
        traceback.print_exc()
        return str(e)
|
|
|
|
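# Example entry point (assumes the sample `text` and `question` defined above).
if __name__ == "__main__":
    print(reasoning(text, question))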