File size: 3,711 Bytes
be39efb
 
 
 
 
 
 
 
 
 
3b2c402
be39efb
 
3b2c402
 
147f853
 
3b2c402
 
5e4395c
be39efb
 
 
 
 
5e4395c
be39efb
 
 
 
 
5e4395c
be39efb
5e4395c
 
be39efb
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9d8a27d
be39efb
9d8a27d
be39efb
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7c6363e
be39efb
 
 
 
 
 
 
8361559
 
 
 
afa334c
8361559
 
 
 
 
 
 
 
 
afa334c
 
8361559
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117

import os


"""
from langchain_openai import ChatOpenAI
llm = ChatOpenAI(temperature=0, model_name="gpt-4-turbo")

from langchain_ollama.llms import OllamaLLM
llm = OllamaLLM(temperature=0,model="llama3.2")

from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI

llm = HuggingFaceInferenceAPI(temperature=0.2, model_name="meta-llama/Llama-3.2-1B")

HF_TOKEN= os.environ["HF_TOKEN"]

from llama_index.llms.litellm import LiteLLM
llm = LiteLLM("huggingface/meta-llama/Llama-3.2-1B")
"""

import networkx as nx
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
from langchain_groq import ChatGroq
from langchain_experimental.graph_transformers import LLMGraphTransformer
from langchain.chains import GraphQAChain
from langchain_core.documents import Document
from langchain_community.graphs.networkx_graph import NetworkxEntityGraph

# Groq API key is read from the environment; .get() returns None when unset,
# in which case ChatGroq construction below will fail at request time.
GROQ_API_KEY = os.environ.get('GROQ_API_KEY')

# Set up LLM and Flux client
# Module-level LLM client shared by knowledge_graph/reasoning/marketing below.
llm = ChatGroq(temperature=0, model_name='llama-3.1-8b-instant', groq_api_key=GROQ_API_KEY)

# Sample customer review used as the marketing persona.
customer="Low APR and great customer service. I would highly recommend if you’re looking for a great credit card company and looking to rebuild your credit. I have had my credit limit increased annually and the annual fee is very low."

# Source text the knowledge graph is extracted from (AARRR funnel description).
text="""
A business model is a combination of things: it's what you sell, how you deliver it, how you acquire customers, and how you make money from them.

Acquisition: how do users become aware of you?
Activation: Do drive-by visitors subscribe and use? 
Retention:  does a one-time user become engaged?
Referral: Do users tell others?
Revenue: How do you make money?
"""
# Ready-made prompt combining the campaign instruction with the persona.
# NOTE(review): marketing() builds an equivalent prompt internally from its
# own argument, so this constant is only needed when calling reasoning().
question=f"Create marketing campaign that can improve customer acquisition, activation, retention and referral for this persona: {customer}"
def knowledge_graph(text):
    """Extract a knowledge graph from free text using the module-level LLM.

    The text is wrapped in a single Document, converted to graph documents
    by LLMGraphTransformer, and loaded into a NetworkxEntityGraph.

    Returns:
        tuple: (NetworkxEntityGraph, list of extracted graph documents).
    """
    docs = [Document(page_content=text)]
    transformer = LLMGraphTransformer(llm=llm)
    # Extraction is currently unconstrained; to restrict it, pass e.g.
    # allowed_nodes=["Need", "Issue", "Product"],
    # allowed_relationships=["WANT", "WITH", "USING", "RECOMMEND"]
    graph_docs = transformer.convert_to_graph_documents(docs)

    entity_graph = NetworkxEntityGraph()
    extracted = graph_docs[0]

    for node in extracted.nodes:
        entity_graph.add_node(node.id)

    for rel in extracted.relationships:
        # NOTE(review): edges go through the private _graph attribute —
        # confirm this NetworkxEntityGraph version has no public edge API.
        entity_graph._graph.add_edge(
            rel.source.id,
            rel.target.id,
            relation=rel.type,
        )

    return entity_graph, graph_docs


def reasoning(text, question):
  """Answer *question* over a knowledge graph built from *text*.

  Builds the graph with knowledge_graph(), wraps it in a GraphQAChain
  driven by the module-level ``llm``, and invokes the chain.

  Returns:
      str: the chain's answer, or the exception message on failure
      (best-effort: errors are printed with a traceback, not re-raised).
  """
  try:
    print("Generate Knowledgegraph...")
    graph, graph_documents_filtered = knowledge_graph(text)

    print("GraphQAChain...")
    graph_rag = GraphQAChain.from_llm(
      llm=llm,
      graph=graph,
      verbose=True
    )

    print("Answering through GraphQAChain...")
    answer = graph_rag.invoke(question)
    return answer['result']

  except Exception as e:
    # Fix: the message previously named "process_text", which does not exist;
    # report the actual failing function so logs point to the right place.
    print(f"An error occurred in reasoning: {str(e)}")
    import traceback
    traceback.print_exc()
    return str(e)


def marketing(text, question):
  """Generate a marketing campaign for the persona given in *question*.

  Builds a knowledge graph from *text*, wraps it in a GraphQAChain with
  the module-level ``llm``, and asks for an AARRR-style campaign
  (acquisition, activation, retention, referral) tailored to the persona.

  Returns:
      str: the chain's answer, or the exception message on failure
      (best-effort: errors are printed with a traceback, not re-raised).
  """
  try:
    print("Generate Knowledgegraph...")
    graph, graph_documents_filtered = knowledge_graph(text)

    print("GraphQAChain...")
    graph_rag = GraphQAChain.from_llm(
      llm=llm,
      graph=graph,
      verbose=True
    )

    print("Answering through GraphQAChain...")
    # Fix: the prompt previously contained a stray newline and indentation
    # ("Create\n  marketing campaign ..."); normalized to match the
    # module-level `question` prompt wording exactly.
    prompt = (
      "Create marketing campaign that can improve customer acquisition, "
      f"activation, retention and referral for this persona: {question}"
    )
    answer = graph_rag.invoke(prompt)
    return answer['result']

  except Exception as e:
    # Fix: the message previously named "process_text", which does not exist;
    # report the actual failing function so logs point to the right place.
    print(f"An error occurred in marketing: {str(e)}")
    import traceback
    traceback.print_exc()
    return str(e)

if __name__=="__main__":
  # No CLI behavior yet; e.g. call reasoning(text, question) or
  # marketing(text, customer) here to run the demo end to end.
  pass